{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3712","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3712\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3712\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3712\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3712","id":1134252505,"node_id":"PR_kwDODunzps4ynVYy","number":3712,"title":"Fix the error of msr_sqa dataset","user":{"login":"Timothyxxx","id":47296835,"node_id":"MDQ6VXNlcjQ3Mjk2ODM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47296835?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Timothyxxx","html_url":"https:\/\/github.com\/Timothyxxx","followers_url":"https:\/\/api.github.com\/users\/Timothyxxx\/followers","following_url":"https:\/\/api.github.com\/users\/Timothyxxx\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Timothyxxx\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Timothyxxx\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Timothyxxx\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Timothyxxx\/orgs","repos_url":"https:\/\/api.github.com\/users\/Timothyxxx\/repos","events_url":"https:\/\/api.github.com\/users\/Timothyxxx\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Timothyxxx\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-12T16:27:54Z","updated_at":"2022-02-12T16:27:54Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3712","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3712","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3712.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3712.patch","merged_at":null},"body":"Fix the error of _load_table_data function in msr_sqa dataset, it is wrong to use comma to split each row.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3712\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3712\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3711","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3711\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3711\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3711\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3711","id":1134050545,"node_id":"PR_kwDODunzps4ymmlK","number":3711,"title":"Fix the error of _load_table_data function in msr_sqa 
dataset","user":{"login":"Timothyxxx","id":47296835,"node_id":"MDQ6VXNlcjQ3Mjk2ODM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47296835?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Timothyxxx","html_url":"https:\/\/github.com\/Timothyxxx","followers_url":"https:\/\/api.github.com\/users\/Timothyxxx\/followers","following_url":"https:\/\/api.github.com\/users\/Timothyxxx\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Timothyxxx\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Timothyxxx\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Timothyxxx\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Timothyxxx\/orgs","repos_url":"https:\/\/api.github.com\/users\/Timothyxxx\/repos","events_url":"https:\/\/api.github.com\/users\/Timothyxxx\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Timothyxxx\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-12T13:20:53Z","updated_at":"2022-02-12T13:30:43Z","closed_at":"2022-02-12T13:30:43Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3711","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3711","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3711.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3711.patch","merged_at":null},"body":"The _load_table_data function from the last version is wrong, it is wrong to use comma to split each row.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3711\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3711\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3710","id":1133955393,"node_id":"PR_kwDODunzps4ymQMQ","number":3710,"title":"Fix CI code quality 
issue","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-12T12:05:39Z","updated_at":"2022-02-12T12:58:05Z","closed_at":"2022-02-12T12:58:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3710","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3710","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3710.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3710.patch","merged_at":"2022-02-12T12:58:04Z"},"body":"Fix CI code quality issue introduced by #3695.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3709","id":1132997904,"node_id":"PR_kwDODunzps4yi0J4","number":3709,"title":"Set base path to hub url for canonical 
datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-11T19:23:20Z","updated_at":"2022-02-11T19:23:20Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3709","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3709","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3709.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3709.patch","merged_at":null},"body":"This should allow canonical datasets to use relative paths to download data files from the Hub\r\n\r\ncc @polinaeterna this will be useful if we have audio datasets that are canonical and for which you'd like to host data files","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3708","id":1132968402,"node_id":"I_kwDODunzps5Dh7nS","number":3708,"title":"Loading JSON gets stuck with many 
workers\/threads","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-11T18:50:48Z","updated_at":"2022-02-11T20:57:53Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nLoading a JSON dataset with `load_dataset` can get stuck when running on a machine with many CPUs. This is especially an issue when loading a large dataset on a large machine. \r\n\r\n\r\n## Steps to reproduce the bug\r\nI originally created the following script to reproduce the issue:\r\n```python\r\nfrom datasets import load_dataset\r\nfrom multiprocessing import Process\r\nfrom tqdm import tqdm\r\nimport datasets\r\nfrom transformers import set_seed\r\n\r\ndef run_tasks_in_parallel(tasks, ds_list):\r\n for _ in tqdm(range(1000)):\r\n print('new batch')\r\n running_tasks = [Process(target=task, args=(ds, i)) for i, (task, ds) in enumerate(zip(tasks, ds_list))]\r\n for running_task in running_tasks:\r\n running_task.start()\r\n for running_task in running_tasks:\r\n running_task.join()\r\n\r\ndef get_dataset():\r\n dataset_name = 'transformersbook\/codeparrot'\r\n ds = load_dataset(dataset_name+'-train', split=\"train\", streaming=True)\r\n ds = ds.shuffle(buffer_size=1000, seed=1)\r\n return iter(ds)\r\n\r\ndef get_next_element(ds, process_id, N=10000):\r\n for _ in range(N):\r\n _ = next(ds)['content']\r\n print(f'process {process_id} done')\r\n return\r\n\r\nset_seed(1)\r\ndatasets.utils.logging.set_verbosity_debug()\r\n\r\nn_processes = 8\r\ntasks = [get_next_element for _ in range(n_processes)]\r\nargs = [get_dataset() for _ in range(n_processes)]\r\nrun_tasks_in_parallel(tasks, args)\r\n```\r\n\r\nToday I noticed that it can happen when running it on a single process on a machine with many cores without streaming. So just `load_dataset(\"transformersbook\/codeparrot-train\")` alone might cause the issue after waiting long enough or trying many times. It's a slightly random process which makes it especially hard to track down. When I encountered it today it had already processed 17GB of data (the size of the cache folder when it got stuck) before getting stuck.\r\n\r\nHere's my current understanding of the error. 
As far as I can tell it happens in the following block: https:\/\/github.com\/huggingface\/datasets\/blob\/be701e9e89ab38022612c7263edc015bc7feaff9\/src\/datasets\/packaged_modules\/json\/json.py#L119-L139\r\n\r\nWhen the try on line 121 fails and the `block_size` is increased it can happen that it can't read the JSON again and gets stuck indefinitely. A hint that points in that direction is that increasing the `chunksize` argument decreases the chance of getting stuck and vice versa. Maybe it is an issue with a lock on the file that is not properly released.\r\n\r\n## Expected results\r\nRead a JSON before the end of the universe.\r\n\r\n## Actual results\r\nRead a JSON not before the end of the universe.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.3\r\n- Platform: Linux-4.19.0-18-cloud-amd64-x86_64-with-glibc2.28\r\n- Python version: 3.9.10\r\n- PyArrow version: 7.0.0\r\n\r\n@lhoestq we dicsussed this a while ago. @albertvillanova we discussed this today :) \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3707","id":1132741903,"node_id":"I_kwDODunzps5DhEUP","number":3707,"title":"`.select`: unexpected behavior with `indices`","user":{"login":"gabegma","id":36087158,"node_id":"MDQ6VXNlcjM2MDg3MTU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36087158?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gabegma","html_url":"https:\/\/github.com\/gabegma","followers_url":"https:\/\/api.github.com\/users\/gabegma\/followers","following_url":"https:\/\/api.github.com\/users\/gabegma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gabegma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gabegma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gabegma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gabegma\/orgs","repos_url":"https:\/\/api.github.com\/users\/gabegma\/repos","events_url":"https:\/\/api.github.com\/users\/gabegma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gabegma\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-11T15:20:01Z","updated_at":"2022-02-11T20:53:53Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe `.select` method will not throw when sending `indices` bigger than the dataset length; `indices` will be 
wrapped instead. This behavior is not documented anywhere, and is not intuitive. \r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import Dataset\r\nds = Dataset.from_dict({\"text\": [\"d\", \"e\", \"f\"], \"label\": [4, 5, 6]})\r\nres1 = ds.select([1, 2, 3])['text']\r\nres2 = ds.select([1000])['text']\r\n```\r\n\r\n## Expected results\r\nBoth results should throw an `Error`.\r\n\r\n## Actual results\r\n`res1` will give `['e', 'f', 'd']`\r\n`res2` will give `['e']`\r\n\r\n## Environment info\r\nBug found from this environment:\r\n- `datasets` version: 1.16.1\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.7\r\n- PyArrow version: 6.0.1\r\n\r\nIt was also replicated on `master`.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3706","id":1132218874,"node_id":"I_kwDODunzps5DfEn6","number":3706,"title":"Unable to load dataset 'big_patent'","user":{"login":"ankitk2109","id":26432753,"node_id":"MDQ6VXNlcjI2NDMyNzUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26432753?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ankitk2109","html_url":"https:\/\/github.com\/ankitk2109","followers_url":"https:\/\/api.github.com\/users\/ankitk2109\/followers","following_url":"https:\/\/api.github.com\/users\/ankitk2109\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ankitk2109\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ankitk2109\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ankitk2109\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ankitk2109\/orgs","repos_url":"https:\/\/api.github.com\/users\/ankitk2109\/repos","events_url":"https:\/\/api.github.com\/users\/ankitk2109\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ankitk2109\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-02-11T09:48:34Z","updated_at":"2022-02-11T14:28:20Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nUnable to load the \"big_patent\" dataset\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset('big_patent', 'd', 'validation')\r\n```\r\n\r\n## Expected results\r\nDownload big_patents' validation split from the 'd' subset\r\n\r\n## Getting an error saying:\r\n{FileNotFoundError}Local file 
..\\huggingface\\datasets\\downloads\\6159313604f4f2c01e7d1cac52139343b6c07f73f6de348d09be6213478455c5\\bigPatentData\\train.tar.gz doesn't exist\r\n\r\n## Environment info\r\n\r\n- `datasets` version:1.18.3\r\n- Platform: Windows\r\n- Python version:3.8\r\n- PyArrow version:7.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3705","id":1132053226,"node_id":"PR_kwDODunzps4yfhyj","number":3705,"title":"Raise informative error when loading a save_to_disk dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-11T08:21:03Z","updated_at":"2022-02-11T22:56:40Z","closed_at":"2022-02-11T22:56:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3705","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3705","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3705.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3705.patch","merged_at":"2022-02-11T22:56:39Z"},"body":"People recurrently report error when trying to load a dataset (using `load_dataset`) that was previously saved using `save_to_disk`.\r\n\r\nThis PR raises an informative error message telling them they should use `load_from_disk` instead.\r\n\r\nClose #3700.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3704","id":1132042631,"node_id":"I_kwDODunzps5DeZmH","number":3704,"title":"OSCAR-2109 datasets are misaligned and truncated","user":{"login":"adrianeboyd","id":5794899,"node_id":"MDQ6VXNlcjU3OTQ4OTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5794899?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adrianeboyd","html_url":"https:\/\/github.com\/adrianeboyd","followers_url":"https:\/\/api.github.com\/users\/adrianeboyd\/followers","following_url":"https:\/\/api.github.com\/users\/adrianeboyd\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adrianeboyd\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adrianeboyd\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adrianeboyd\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adrianeboyd\/orgs","repos_url":"https:\/\/api.github.com\/users\/adrianeboyd\/repos","events_url":"https:\/\/api.github.com\/users\/adrianeboyd\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adrianeboyd\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-02-11T08:14:59Z","updated_at":"2022-02-11T10:41:41Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nThe `oscar-corpus\/OSCAR-2109` data appears to be misaligned and truncated by the dataset builder for subsets that contain more than one part and for cases where the texts contain non-unix newlines.\r\n\r\n## Steps to reproduce the bug\r\n\r\nA few examples, although I'm not sure how deterministic the particular (mis)alignment is in various configurations:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"oscar-corpus\/OSCAR-2109\", \"deduplicated_fi\", split=\"train\", use_auth_token=True)\r\nentry = dataset[0]\r\n# entry[\"text\"] is from fi_part_3.txt.gz\r\n# entry[\"meta\"] is from fi_meta_part_2.jsonl.gz\r\n\r\ndataset = load_dataset(\"oscar-corpus\/OSCAR-2109\", \"deduplicated_no\", split=\"train\", use_auth_token=True)\r\nentry = dataset[900000]\r\n# entry[\"text\"] is from no_part_3.txt.gz and contains a blank line\r\n# entry[\"meta\"] is from no_meta_part_1.jsonl.gz\r\n\r\ndataset = load_dataset(\"oscar-corpus\/OSCAR-2109\", \"deduplicated_mk\", split=\"train\", streaming=True, use_auth_token=True)\r\n# 9088 texts in the dataset are empty\r\n```\r\n\r\nFor `deduplicated_fi`, all exported raw texts from the dataset are 17GB rather than 20GB as reported in the data splits overview table. 
The token count with `wc -w` for the raw texts is 2,067,556,874 rather than the expected 2,357,264,196 from the data splits table.\r\n\r\nFor `deduplicated_no` all exported raw texts contain 624,040,887 rather than the expected 776,354,517 tokens.\r\n\r\nFor `deduplicated_mk` it is 122,236,936 rather than 134,544,934 tokens. \r\n\r\nI'm not expecting the `wc -w` counts to line up exactly with the data splits table, but for comparison the `wc -w` count for `deduplicated_mk` on the raw texts is 134,545,424.\r\n\r\n## Issues\r\n\r\n* The meta \/ text files are not paired correctly when loading, so the extracted texts do not have the right offsets, the metadata is not associated with the correct text, and the text files may not be processed to the end or may be processed beyond the end (empty texts).\r\n* The line count offset is not reset per file so the texts aren't aligned to the right offsets in any parts beyond the first part, leading to truncation when in effect blank lines are not skipped.\r\n* Non-unix newline characters are treated as newlines when reading the text files while the metadata only counts unix newlines for its line offsets, leading to further misalignments between the metadata and the extracted texts, and which also results in truncation.\r\n\r\n## Expected results\r\n\r\nAll texts from the OSCAR release are extracted according to the metadata and aligned with the correct metadata.\r\n\r\n## Fixes\r\n\r\nNot necessarily the exact fixes\/checks you may want to use (I didn't test all languages or do any cross-platform testing, I'm not sure all the details are compatible with streaming), however to highlight the issues:\r\n\r\n```diff\r\ndiff --git a\/OSCAR-2109.py b\/OSCAR-2109.py\r\nindex bbac1076..5eee8de7 100644\r\n--- a\/OSCAR-2109.py\r\n+++ b\/OSCAR-2109.py\r\n@@ -20,6 +20,7 @@\r\n import collections\r\n import gzip\r\n import json\r\n+import os\r\n \r\n import datasets\r\n \r\n@@ -387,9 +388,20 @@ class Oscar2109(datasets.GeneratorBasedBuilder):\r\n with open(checksum_file, encoding=\"utf-8\") as f:\r\n data_filenames = [line.split()[1] for line in f if line]\r\n data_urls = [self.config.base_data_path + data_filename for data_filename in data_filenames]\r\n- text_files = dl_manager.download([url for url in data_urls if url.endswith(\".txt.gz\")])\r\n- metadata_files = dl_manager.download([url for url in data_urls if url.endswith(\".jsonl.gz\")])\r\n+ # sort filenames so corresponding parts are aligned\r\n+ text_files = sorted(dl_manager.download([url for url in data_urls if url.endswith(\".txt.gz\")]))\r\n+ metadata_files = sorted(dl_manager.download([url for url in data_urls if url.endswith(\".jsonl.gz\")]))\r\n+ assert len(text_files) == len(metadata_files)\r\n metadata_and_text_files = list(zip(metadata_files, text_files))\r\n+ for meta_path, text_path in metadata_and_text_files:\r\n+ # check that meta\/text part numbers are the same\r\n+ if \"part\" in os.path.basename(text_path):\r\n+ assert (\r\n+ os.path.basename(text_path).replace(\".txt.gz\", \"\").split(\"_\")[-1]\r\n+ == os.path.basename(meta_path).replace(\".jsonl.gz\", \"\").split(\"_\")[-1]\r\n+ )\r\n+ else:\r\n+ assert len(metadata_and_text_files) == 1\r\n return [\r\n datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={\"metadata_and_text_files\": metadata_and_text_files}),\r\n ]\r\n@@ -397,10 +409,14 @@ class Oscar2109(datasets.GeneratorBasedBuilder):\r\n def _generate_examples(self, metadata_and_text_files):\r\n \"\"\"This function returns the examples in the raw (text) form by iterating 
on all the files.\"\"\"\r\n id_ = 0\r\n- offset = 0\r\n for meta_path, text_path in metadata_and_text_files:\r\n+ # line offsets are per text file\r\n+ offset = 0\r\n logger.info(\"generating examples from = %s\", text_path)\r\n- with gzip.open(open(text_path, \"rb\"), \"rt\", encoding=\"utf-8\") as text_f:\r\n+ # some texts contain non-Unix newlines that should not be\r\n+ # interpreted as line breaks for the line counts in the metadata\r\n+ # with readline()\r\n+ with gzip.open(open(text_path, \"rb\"), \"rt\", encoding=\"utf-8\", newline=\"\\n\") as text_f:\r\n with gzip.open(open(meta_path, \"rb\"), \"rt\", encoding=\"utf-8\") as meta_f:\r\n for line in meta_f:\r\n # read meta\r\n@@ -411,7 +427,12 @@ class Oscar2109(datasets.GeneratorBasedBuilder):\r\n offset += 1\r\n text_f.readline()\r\n # read text\r\n- text = \"\".join([text_f.readline() for _ in range(meta[\"nb_sentences\"])]).rstrip()\r\n+ text_lines = [text_f.readline() for _ in range(meta[\"nb_sentences\"])]\r\n+ # all lines contain text (no blank lines or EOF)\r\n+ assert all(text_lines)\r\n+ assert \"\\n\" not in text_lines\r\n offset += meta[\"nb_sentences\"]\r\n+ # only strip the trailing newline\r\n+ text = \"\".join(text_lines).rstrip(\"\\n\")\r\n yield id_, {\"id\": id_, \"text\": text, \"meta\": meta}\r\n id_ += 1\r\n```\r\n\r\nI've tested this with a number of smaller deduplicated languages with 1-20 parts and the resulting datasets looked correct in terms of word count and size when compared to the data splits table and raw texts, and the text\/metadata alignments were correct in all my spot checks. However, there are many many languages I didn't test and I'm not sure that there aren't any texts containing blank lines in the corpus, for instance. For the cases I tested, the assertions related to blank lines and EOF made it easier to verify that the text and metadata were aligned as intended, since there would be little chance of spurious alignments of variable-length texts across so much data.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3703","id":1131882772,"node_id":"I_kwDODunzps5DdykU","number":3703,"title":"ImportError: To be able to use this metric, you need to install the following dependencies['seqeval'] using 'pip install seqeval' for 
instance'","user":{"login":"zhangyifei1","id":28425091,"node_id":"MDQ6VXNlcjI4NDI1MDkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28425091?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zhangyifei1","html_url":"https:\/\/github.com\/zhangyifei1","followers_url":"https:\/\/api.github.com\/users\/zhangyifei1\/followers","following_url":"https:\/\/api.github.com\/users\/zhangyifei1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zhangyifei1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zhangyifei1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zhangyifei1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zhangyifei1\/orgs","repos_url":"https:\/\/api.github.com\/users\/zhangyifei1\/repos","events_url":"https:\/\/api.github.com\/users\/zhangyifei1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zhangyifei1\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-11T06:38:42Z","updated_at":"2022-02-11T06:40:18Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"hi :\r\nI want to use the seqeval indicator because of direct load_ When metric ('seqeval '), it will prompt that the network connection fails. So I downloaded the seqeval Py to load locally. Loading code: metric = load_ metric(path='mymetric\/seqeval\/seqeval.py')\r\n\r\nBut tips:\r\n\r\nTraceback (most recent call last):\r\n File \"\/home\/ubuntu\/Python3.6_project\/zyf_project\/transformers\/examples\/pytorch\/token-classification\/run_ner.py\", line 604, in \r\n main()\r\n File \"\/home\/ubuntu\/Python3.6_project\/zyf_project\/transformers\/examples\/pytorch\/token-classification\/run_ner.py\", line 481, in main\r\n metric = load_metric(path='mymetric\/seqeval\/seqeval.py')\r\n File \"\/home\/ubuntu\/Python3.6_project\/zyf_project\/transformers_venv_0209\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 610, in load_metric\r\n dataset=False,\r\n File \"\/home\/ubuntu\/Python3.6_project\/zyf_project\/transformers_venv_0209\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 450, in prepare_module\r\n f\"To be able to use this {module_type}, you need to install the following dependencies\"\r\nImportError: To be able to use this metric, you need to install the following dependencies['seqeval'] using 'pip install seqeval' for instance'\r\n\r\n\r\n**What should I do? 
Please help me, thank you**\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3702","id":1130666707,"node_id":"PR_kwDODunzps4yahKc","number":3702,"title":"Update the address to use https","user":{"login":"yazdanbakhsh","id":7105134,"node_id":"MDQ6VXNlcjcxMDUxMzQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7105134?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yazdanbakhsh","html_url":"https:\/\/github.com\/yazdanbakhsh","followers_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/followers","following_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/orgs","repos_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/repos","events_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T18:46:30Z","updated_at":"2022-02-10T18:46:30Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3702","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3702","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3702.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3702.patch","merged_at":null},"body":"The http address doesn't work anymore","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3701","id":1130498738,"node_id":"PR_kwDODunzps4yZ8Dw","number":3701,"title":"Pin 
ElasticSearch","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T17:15:26Z","updated_at":"2022-02-10T17:31:13Z","closed_at":"2022-02-10T17:31:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3701","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3701","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3701.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3701.patch","merged_at":"2022-02-10T17:31:12Z"},"body":"Until we manage to support ES 8.0, I'm setting the version to `<8.0.0`\r\n\r\nCurrently we're getting this error on 8.0:\r\n```python\r\nValueError: Either 'hosts' or 'cloud_id' must be specified\r\n```\r\nWhen instantiating a `Elasticsearch()` object","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3700","id":1130252496,"node_id":"I_kwDODunzps5DXkjQ","number":3700,"title":"Unable to load a 
dataset","user":{"login":"PaulchauvinAI","id":97964230,"node_id":"U_kgDOBdbQxg","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/97964230?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulchauvinAI","html_url":"https:\/\/github.com\/PaulchauvinAI","followers_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/followers","following_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/repos","events_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-10T15:05:53Z","updated_at":"2022-02-11T22:56:39Z","closed_at":"2022-02-11T22:56:39Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nUnable to load a dataset from Huggingface that I have just saved.\r\n\r\n\r\n## Steps to reproduce the bug\r\nOn Google colab\r\n`! pip install datasets `\r\n`from datasets import load_dataset`\r\n`my_path = \"wiki_dataset\"`\r\n`dataset = load_dataset('wikipedia', \"20200501.fr\")`\r\n`dataset.save_to_disk(my_path)`\r\n`dataset = load_dataset(my_path)`\r\n\r\n\r\n## Expected results\r\nLoading the dataset\r\n\r\n## Actual results\r\nValueError: Couldn't cast\r\n_data_files: list>\r\n child 0, item: struct\r\n child 0, filename: string\r\n_fingerprint: string\r\n_format_columns: null\r\n_format_kwargs: struct<>\r\n_format_type: null\r\n_indexes: struct<>\r\n_output_all_columns: bool\r\n_split: string\r\nto\r\n{'builder_name': Value(dtype='string', id=None), 'citation': Value(dtype='string', id=None), 'config_name': Value(dtype='string', id=None), 'dataset_size': Value(dtype='int64', id=None), 'description': Value(dtype='string', id=None), 'download_checksums': {}, 'download_size': Value(dtype='int64', id=None), 'features': {'title': {'dtype': Value(dtype='string', id=None), 'id': Value(dtype='null', id=None), '_type': Value(dtype='string', id=None)}, 'text': {'dtype': Value(dtype='string', id=None), 'id': Value(dtype='null', id=None), '_type': Value(dtype='string', id=None)}}, 'homepage': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'post_processed': Value(dtype='null', id=None), 'post_processing_size': Value(dtype='null', id=None), 'size_in_bytes': Value(dtype='int64', id=None), 'splits': {'train': {'name': Value(dtype='string', id=None), 'num_bytes': Value(dtype='int64', id=None), 'num_examples': Value(dtype='int64', id=None), 'dataset_name': Value(dtype='string', id=None)}}, 'supervised_keys': Value(dtype='null', id=None), 'task_templates': Value(dtype='null', id=None), 'version': {'version_str': Value(dtype='string', id=None), 'description': Value(dtype='string', id=None), 'major': Value(dtype='int64', 
id=None), 'minor': Value(dtype='int64', id=None), 'patch': Value(dtype='int64', id=None)}}\r\nbecause column names don't match\r\n\r\n## Environment info\r\n- `datasets` version: 1.18.3\r\n- Platform: Linux-5.4.144+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 6.0.1\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3699","id":1130200593,"node_id":"PR_kwDODunzps4yY49I","number":3699,"title":"Add dev-only config to Natural Questions dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-10T14:42:24Z","updated_at":"2022-02-11T09:50:22Z","closed_at":"2022-02-11T09:50:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3699","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3699","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3699.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3699.patch","merged_at":"2022-02-11T09:50:21Z"},"body":"As suggested by @lhoestq and @thomwolf, a new config has been added to Natural Questions dataset, so that only dev split can be downloaded. 
\r\n\r\nFix #413.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3698","id":1129864282,"node_id":"PR_kwDODunzps4yXtyQ","number":3698,"title":"Add finetune-data CodeFill ","user":{"login":"rgismondi","id":49989029,"node_id":"MDQ6VXNlcjQ5OTg5MDI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49989029?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rgismondi","html_url":"https:\/\/github.com\/rgismondi","followers_url":"https:\/\/api.github.com\/users\/rgismondi\/followers","following_url":"https:\/\/api.github.com\/users\/rgismondi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rgismondi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rgismondi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rgismondi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rgismondi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rgismondi\/repos","events_url":"https:\/\/api.github.com\/users\/rgismondi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rgismondi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T11:12:51Z","updated_at":"2022-02-10T11:12:51Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3698","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3698","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3698.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3698.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3697","id":1129795724,"node_id":"PR_kwDODunzps4yXeXo","number":3697,"title":"Add code-fill datasets for 
pretraining\/finetuning\/evaluating","user":{"login":"rgismondi","id":49989029,"node_id":"MDQ6VXNlcjQ5OTg5MDI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49989029?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rgismondi","html_url":"https:\/\/github.com\/rgismondi","followers_url":"https:\/\/api.github.com\/users\/rgismondi\/followers","following_url":"https:\/\/api.github.com\/users\/rgismondi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rgismondi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rgismondi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rgismondi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rgismondi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rgismondi\/repos","events_url":"https:\/\/api.github.com\/users\/rgismondi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rgismondi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T10:31:48Z","updated_at":"2022-02-10T11:00:44Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3697","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3697","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3697.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3697.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3696","id":1129764534,"node_id":"PR_kwDODunzps4yXXgH","number":3696,"title":"Force unique keys in newsqa 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T10:09:19Z","updated_at":"2022-02-10T10:09:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3696","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3696","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3696.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3696.patch","merged_at":null},"body":"Currently, it may raise `DuplicatedKeysError`.\r\n\r\nFix #3630.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3695","id":1129730148,"node_id":"PR_kwDODunzps4yXP44","number":3695,"title":"Fix ClassLabel to\/from dict when passed 
names_file","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T09:47:10Z","updated_at":"2022-02-11T23:02:32Z","closed_at":"2022-02-11T23:02:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3695","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3695","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3695.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3695.patch","merged_at":"2022-02-11T23:02:31Z"},"body":"Currently, `names_file` is a field of the data class `ClassLabel`, thus appearing when transforming it to dict (when saving infos). 
Afterwards, when trying to read it from infos, it conflicts with the other field `names`.\r\n\r\nThis PR, removes `names_file` as a field of the data class `ClassLabel`.\r\n- it is only used at instantiation to generate the `labels` field\r\n\r\nFix #3631.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3693","id":1128554365,"node_id":"PR_kwDODunzps4yTTcQ","number":3693,"title":"Standardize to `Example::`","user":{"login":"mishig25","id":11827707,"node_id":"MDQ6VXNlcjExODI3NzA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11827707?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mishig25","html_url":"https:\/\/github.com\/mishig25","followers_url":"https:\/\/api.github.com\/users\/mishig25\/followers","following_url":"https:\/\/api.github.com\/users\/mishig25\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mishig25\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mishig25\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mishig25\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mishig25\/orgs","repos_url":"https:\/\/api.github.com\/users\/mishig25\/repos","events_url":"https:\/\/api.github.com\/users\/mishig25\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mishig25\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-09T13:37:13Z","updated_at":"2022-02-09T13:37:13Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3693","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3693","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3693.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3693.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693\/timeline","performed_via_github_app":null} 
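Editor's note on PR #3695 above: a minimal sketch, not taken from the PR itself, of how a `ClassLabel` behaves once its label names are set. The two-element list stands in for the contents of a `names_file`, which per the PR description is only consumed at instantiation and is no longer kept as a serialized field.

```python
# Hedged sketch (assumed usage, not the PR's test code): a ClassLabel built from
# explicit names; a names_file would only be read once, at construction time,
# to produce this same `names` list.
from datasets import ClassLabel

labels = ClassLabel(names=["negative", "positive"])
print(labels.num_classes)          # 2
print(labels.str2int("positive"))  # 1
print(labels.int2str(0))           # negative
```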
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3692","id":1128320004,"node_id":"PR_kwDODunzps4yShiu","number":3692,"title":"Update data URL in pubmed dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-09T10:06:21Z","updated_at":"2022-02-10T14:58:00Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3692","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3692","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3692.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3692.patch","merged_at":null},"body":"Fix #3655.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3691","id":1127629306,"node_id":"PR_kwDODunzps4yQThV","number":3691,"title":"Upgrade black to version 
~=22.0","user":{"login":"LysandreJik","id":30755778,"node_id":"MDQ6VXNlcjMwNzU1Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30755778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LysandreJik","html_url":"https:\/\/github.com\/LysandreJik","followers_url":"https:\/\/api.github.com\/users\/LysandreJik\/followers","following_url":"https:\/\/api.github.com\/users\/LysandreJik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LysandreJik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LysandreJik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LysandreJik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LysandreJik\/orgs","repos_url":"https:\/\/api.github.com\/users\/LysandreJik\/repos","events_url":"https:\/\/api.github.com\/users\/LysandreJik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LysandreJik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-08T18:45:19Z","updated_at":"2022-02-08T19:56:40Z","closed_at":"2022-02-08T19:56:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3691","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3691","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3691.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3691.patch","merged_at":"2022-02-08T19:56:39Z"},"body":"Upgrades the `datasets` library quality tool `black` to use the first stable release of `black`, version 22.0.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3690","id":1127493538,"node_id":"PR_kwDODunzps4yP2p5","number":3690,"title":"WIP: update docs to new 
frontend\/UI","user":{"login":"mishig25","id":11827707,"node_id":"MDQ6VXNlcjExODI3NzA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11827707?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mishig25","html_url":"https:\/\/github.com\/mishig25","followers_url":"https:\/\/api.github.com\/users\/mishig25\/followers","following_url":"https:\/\/api.github.com\/users\/mishig25\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mishig25\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mishig25\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mishig25\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mishig25\/orgs","repos_url":"https:\/\/api.github.com\/users\/mishig25\/repos","events_url":"https:\/\/api.github.com\/users\/mishig25\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mishig25\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-08T16:38:09Z","updated_at":"2022-02-11T16:22:10Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3690","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3690","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3690.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3690.patch","merged_at":null},"body":"### TLDR: Update `datasets` `docs` to the new syntax & frontend (as how it looks on [hf.co\/transformers](https:\/\/huggingface.co\/docs\/transformers\/index))\r\n\r\n## Checklist\r\n\r\n- [ ] update datasets docs to new syntax (should call `doc-builder convert`) (this PR)\r\n- [x] discuss `@property` methods frontend https:\/\/github.com\/huggingface\/doc-builder\/pull\/87\r\n- [x] discuss `inject_arrow_table_documentation` (this PR) https:\/\/github.com\/huggingface\/datasets\/pull\/3690#discussion_r801847860\r\n- [x] update datasets docs path on moon-landing https:\/\/github.com\/huggingface\/moon-landing\/pull\/2089\r\n- [ ] update nginx `docs\/datasets` to route to moon-landing (do similar to internal repo # 81)\r\n- [x] convert pyarrow docstring from Numpydoc style to groups style https:\/\/github.com\/huggingface\/doc-builder\/pull\/89(https:\/\/stackoverflow.com\/a\/24385103\/6558628)\r\n- [x] handle `Raises` section on frontend and doc-builder https:\/\/github.com\/huggingface\/doc-builder\/pull\/86\r\n- [x] check imgs path (this PR) (nothing to update here)\r\n- [ ] delete sphinx related files (this PR)\r\n- [ ] update github actions (doc quality check & PR doc)\r\n- [x] doc exaples block has to follow format `Examples::` https:\/\/github.com\/huggingface\/datasets\/pull\/3693\r\n- [x] add `versions.yml` in doc-build https:\/\/github.com\/huggingface\/doc-build\/pull\/1\r\n- [ ] add `versions.yml` in doc-build-dev ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3689","id":1127422478,"node_id":"PR_kwDODunzps4yPnp7","number":3689,"title":"Fix streaming for servers not supporting HTTP range requests","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2022-02-08T15:41:05Z","updated_at":"2022-02-10T16:51:25Z","closed_at":"2022-02-10T16:51:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3689","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3689","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3689.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3689.patch","merged_at":"2022-02-10T16:51:24Z"},"body":"Some servers do not support HTTP range requests, whereas this is required to stream some file formats (like ZIP).\r\n\r\n~~This PR implements a workaround for those cases, by download the files locally in a temporary directory (cleaned up by the OS once the process is finished).~~\r\n\r\nThis PR raises custom error explaining that streaming is not possible because data host server does not support HTTP range requests.\r\n\r\nFix #3677.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3688","id":1127218321,"node_id":"I_kwDODunzps5DL_yR","number":3688,"title":"Pyarrow version error","user":{"login":"Zaker237","id":49993443,"node_id":"MDQ6VXNlcjQ5OTkzNDQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49993443?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Zaker237","html_url":"https:\/\/github.com\/Zaker237","followers_url":"https:\/\/api.github.com\/users\/Zaker237\/followers","following_url":"https:\/\/api.github.com\/users\/Zaker237\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Zaker237\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Zaker237\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Zaker237\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Zaker237\/orgs","repos_url":"https:\/\/api.github.com\/users\/Zaker237\/repos","events_url":"https:\/\/api.github.com\/users\/Zaker237\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Zaker237\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2022-02-08T12:53:59Z","updated_at":"2022-02-09T06:35:33Z","closed_at":"2022-02-09T06:35:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI installed datasets(version 1.17.0, 1.18.0, 1.18.3) but i'm right now nor able to import it because of pyarrow. 
when i try to import it, i get the following error:\r\n`To use datasets, the module pyarrow>=3.0.0 is required, and the current version of pyarrow doesn't match this condition`.\r\ni tryed with all version of pyarrow execpt `4.0.0` but still get the same error.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nAttributeError Traceback (most recent call last)\r\n in \r\n----> 1 import datasets\r\n\r\n~\\AppData\\Local\\Continuum\\anaconda3\\lib\\site-packages\\datasets\\__init__.py in \r\n 26 \r\n 27 \r\n---> 28 if _version.parse(pyarrow.__version__).major < 3:\r\n 29 raise ImportWarning(\r\n 30 \"To use `datasets`, the module `pyarrow>=3.0.0` is required, and the current version of `pyarrow` doesn't match this condition.\\n\"\r\n\r\nAttributeError: 'Version' object has no attribute 'major'\r\n\r\n## Environment info\r\nTraceback (most recent call last):\r\n File \"c:\\users\\alex\\appdata\\local\\continuum\\anaconda3\\lib\\runpy.py\", line 193, in _run_module_as_main\r\n \"__main__\", mod_spec)\r\n File \"c:\\users\\alex\\appdata\\local\\continuum\\anaconda3\\lib\\runpy.py\", line 85, in _run_code\r\n exec(code, run_globals)\r\n File \"C:\\Users\\Alex\\AppData\\Local\\Continuum\\anaconda3\\Scripts\\datasets-cli.exe\\__main__.py\", line 5, in \r\n File \"c:\\users\\alex\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages\\datasets\\__init__.py\", line 28, in \r\n if _version.parse(pyarrow.__version__).major < 3:\r\nAttributeError: 'Version' object has no attribute 'major'\r\n\r\n- `datasets` version:\r\n- Platform: Linux(Ubuntu) and Windows: conda on the both\r\n- Python version: 3.7\r\n- PyArrow version: 7.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3687","id":1127154766,"node_id":"I_kwDODunzps5DLwRO","number":3687,"title":"Can't get the text data when calling 
to_tf_dataset","user":{"login":"phrasenmaeher","id":82086367,"node_id":"MDQ6VXNlcjgyMDg2MzY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/82086367?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/phrasenmaeher","html_url":"https:\/\/github.com\/phrasenmaeher","followers_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/followers","following_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/orgs","repos_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/repos","events_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"assignees":[{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2022-02-08T11:52:10Z","updated_at":"2022-02-08T16:54:55Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am working with the SST2 dataset, and am using TensorFlow 2.5\r\nI'd like to convert it to a `tf.data.Dataset` by calling the `to_tf_dataset` method.\r\nThe 
following snippet is what I am using to achieve this:\r\n\r\n```\r\nfrom datasets import load_dataset\r\nfrom transformers import DefaultDataCollator\r\n\r\ndata_collator = DefaultDataCollator(return_tensors=\"tf\")\r\ndataset = load_dataset(\"sst\")\r\n\r\ntrain_dataset = dataset[\"train\"].to_tf_dataset(columns=['sentence'], label_cols=\"label\", shuffle=True, batch_size=8,collate_fn=data_collator)\r\n```\r\nHowever, this only gets me the labels; the text--the most important part--is missing:\r\n\r\n```\r\nfor s in train_dataset.take(1):\r\n print(s) #prints something like: ({}, )\r\n```\r\n\r\nAs you can see, it only returns the label part, not the data, as indicated by the empty dictionary, `{}`. So far, I've played with various settings of the method arguments, but to no avail; I do not want to perform any text processing at this time. On my quest to achieve what I want ( a `tf.data.Dataset`), I've consulted these resources:\r\n\r\n[https:\/\/www.philschmid.de\/huggingface-transformers-keras-tf](https:\/\/www.philschmid.de\/huggingface-transformers-keras-tf)\r\n[https:\/\/huggingface.co\/docs\/datasets\/use_dataset.html?highlight=tensorflow](https:\/\/huggingface.co\/docs\/datasets\/use_dataset.html?highlight=tensorflow)\r\n\r\nI was surprised to not find more extensive examples on how to transform a Hugginface dataset to one compatible with TensorFlow.\r\n\r\nIf you could point me to where I am going wrong, please do so.\r\nThanks in advance for your support.\r\n\r\n---\r\nEdit: In the [docs](https:\/\/huggingface.co\/docs\/datasets\/package_reference\/main_classes.html#datasets.Dataset.to_tf_dataset), I found the following description:\r\n\r\n\r\n_In general, only columns that the model can use as input should be included here (numeric data only)._\r\n\r\nDoes this imply that no textual, i.e., `string` data can be loaded?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3686","id":1127137290,"node_id":"I_kwDODunzps5DLsAK","number":3686,"title":"`Translation` features cannot be 
`flatten`ed","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-02-08T11:33:48Z","updated_at":"2022-02-08T13:52:34Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the 
bug\r\n\r\n(`Dataset.flatten`)[https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/arrow_dataset.py#L1265] fails for columns with feature (`Translation`)[https:\/\/github.com\/huggingface\/datasets\/blob\/3edbeb0ec6519b79f1119adc251a1a6b379a2c12\/src\/datasets\/features\/translation.py#L8]\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"europa_ecdc_tm\", \"en2fr\", split=\"train[:10]\")\r\nprint(dataset.features)\r\n# {'translation': Translation(languages=['en', 'fr'], id=None)}\r\nprint(dataset[0])\r\n# {'translation': {'en': 'Vaccination against hepatitis C is not yet available.', 'fr': 'Aucune vaccination contre l\u2019h\u00e9patite C n\u2019est encore disponible.'}}\r\n\r\ndataset.flatten()\r\n```\r\n\r\n## Expected results\r\n\r\n`dataset.flatten` should flatten the `Translation` column as if it were a dict of `Value(\"string\")`\r\n\r\n```python\r\ndataset[0]\r\n# {'translation.en': 'Vaccination against hepatitis C is not yet available.', 'translation.fr': 'Aucune vaccination contre l\u2019h\u00e9patite C n\u2019est encore disponible.' }\r\ndataset.features\r\n# {'translation.en': Value(\"string\"), 'translation.fr': Value(\"string\")}\r\n```\r\n\r\n## Actual results\r\n\r\n```python\r\nIn [31]: dset.flatten()\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n in \r\n----> 1 dset.flatten()\r\n\r\n[...]\\site-packages\\datasets\\fingerprint.py in wrapper(*args, **kwargs)\r\n 411 # Call actual function\r\n 412\r\n--> 413 out = func(self, *args, **kwargs)\r\n 414\r\n 415 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n[...]\\site-packages\\datasets\\arrow_dataset.py in flatten(self, new_fingerprint, max_depth)\r\n 1294 break\r\n 1295 dataset.info.features = self.features.flatten(max_depth=max_depth)\r\n-> 1296 dataset._data = update_metadata_with_features(dataset._data, dataset.features)\r\n 1297 logger.info(f'Flattened dataset from depth {depth} to depth {1 if depth + 1 < max_depth else \"unknown\"}.')\r\n 1298 dataset._fingerprint = new_fingerprint\r\n\r\n[...]\\site-packages\\datasets\\arrow_dataset.py in update_metadata_with_features(table, features)\r\n 534 def update_metadata_with_features(table: Table, features: Features):\r\n 535 \"\"\"To be used in dataset transforms that modify the features of the dataset, in order to update the features stored in the metadata of its schema.\"\"\"\r\n--> 536 features = Features({col_name: features[col_name] for col_name in table.column_names})\r\n 537 if table.schema.metadata is None or b\"huggingface\" not in table.schema.metadata:\r\n 538 pa_metadata = ArrowWriter._build_metadata(DatasetInfo(features=features))\r\n\r\n[...]\\site-packages\\datasets\\arrow_dataset.py in (.0)\r\n 534 def update_metadata_with_features(table: Table, features: Features):\r\n 535 \"\"\"To be used in dataset transforms that modify the features of the dataset, in order to update the features stored in the metadata of its schema.\"\"\"\r\n--> 536 features = Features({col_name: features[col_name] for col_name in table.column_names})\r\n 537 if table.schema.metadata is None or b\"huggingface\" not in table.schema.metadata:\r\n 538 pa_metadata = ArrowWriter._build_metadata(DatasetInfo(features=features))\r\n\r\nKeyError: 'translation.en'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.3\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- 
Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3685","id":1126240444,"node_id":"PR_kwDODunzps4yLw3m","number":3685,"title":"Add support for `Audio` and `Image` feature in `push_to_hub`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-07T16:47:16Z","updated_at":"2022-02-11T19:40:00Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3685","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3685","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3685.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3685.patch","merged_at":null},"body":"Add support for the `Audio` and the `Image` feature in `push_to_hub`. \r\n\r\nThe idea is to remove local path information and store file content under \"bytes\" in the Arrow table before the push.\r\n\r\nMy initial approach (https:\/\/github.com\/huggingface\/datasets\/commit\/34c652afeff9686b6b8bf4e703c84d2205d670aa) was to use a map transform similar to [`decode_nested_example`](https:\/\/github.com\/huggingface\/datasets\/blob\/5e0f6068741464f833ff1802e24ecc2064aaea9f\/src\/datasets\/features\/features.py#L1023-L1056) while having decoding turned off, but I wasn't satisfied with the code quality, so I ended up using the `temporary_assignment` decorator to override `cast_storage`, which allows me to directly modify the underlying storage (the final op is similar to `Dataset.cast`) and results in a much simpler code. 
\r\n\r\nAdditionally, I added the `allow_cast` flag that can disable this behavior in the situations where it's not needed (e.g. the dataset is already in the correct format for the Hub, etc.)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3684","id":1125133664,"node_id":"PR_kwDODunzps4yIOer","number":3684,"title":"[fix]: iwslt2017 download urls","user":{"login":"msarmi9","id":48395294,"node_id":"MDQ6VXNlcjQ4Mzk1Mjk0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48395294?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/msarmi9","html_url":"https:\/\/github.com\/msarmi9","followers_url":"https:\/\/api.github.com\/users\/msarmi9\/followers","following_url":"https:\/\/api.github.com\/users\/msarmi9\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/msarmi9\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/msarmi9\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/msarmi9\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/msarmi9\/orgs","repos_url":"https:\/\/api.github.com\/users\/msarmi9\/repos","events_url":"https:\/\/api.github.com\/users\/msarmi9\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/msarmi9\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-02-06T07:56:55Z","updated_at":"2022-02-09T08:39:31Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3684","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3684","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3684.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3684.patch","merged_at":null},"body":"Fixes #2076.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684\/timeline","performed_via_github_app":null} 
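Editor's note on PR #3685 (two entries above): a rough sketch, under assumed column names and a hypothetical repo id, of the user-facing workflow the PR targets, where an `Image` column that starts out as local file paths has its file contents embedded when the dataset is pushed to the Hub.

```python
# Sketch only: "image" holds local paths; casting to the Image feature lets
# push_to_hub upload file bytes instead of machine-specific paths.
# "username/pets-demo" is a hypothetical repo id and pushing requires prior login.
from datasets import Dataset, Image

ds = Dataset.from_dict({"image": ["img/cat.png", "img/dog.png"], "label": [0, 1]})
ds = ds.cast_column("image", Image())
ds.push_to_hub("username/pets-demo")
```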
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3683","id":1124458371,"node_id":"PR_kwDODunzps4yGKoj","number":3683,"title":"added told-br (brazilian hate speech) dataset","user":{"login":"JAugusto97","id":26556320,"node_id":"MDQ6VXNlcjI2NTU2MzIw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26556320?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JAugusto97","html_url":"https:\/\/github.com\/JAugusto97","followers_url":"https:\/\/api.github.com\/users\/JAugusto97\/followers","following_url":"https:\/\/api.github.com\/users\/JAugusto97\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JAugusto97\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JAugusto97\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JAugusto97\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JAugusto97\/orgs","repos_url":"https:\/\/api.github.com\/users\/JAugusto97\/repos","events_url":"https:\/\/api.github.com\/users\/JAugusto97\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JAugusto97\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-04T17:44:32Z","updated_at":"2022-02-07T21:14:52Z","closed_at":"2022-02-07T21:14:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3683","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3683","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3683.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3683.patch","merged_at":"2022-02-07T21:14:52Z"},"body":"Hey,\r\n\r\nAdding ToLD-Br. Feel free to ask for modifications. 
\r\n\r\nThanks!!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3682","id":1124434330,"node_id":"PR_kwDODunzps4yGFml","number":3682,"title":"adding told-br for toxic\/abusive hatespeech detection","user":{"login":"JAugusto97","id":26556320,"node_id":"MDQ6VXNlcjI2NTU2MzIw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26556320?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JAugusto97","html_url":"https:\/\/github.com\/JAugusto97","followers_url":"https:\/\/api.github.com\/users\/JAugusto97\/followers","following_url":"https:\/\/api.github.com\/users\/JAugusto97\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JAugusto97\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JAugusto97\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JAugusto97\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JAugusto97\/orgs","repos_url":"https:\/\/api.github.com\/users\/JAugusto97\/repos","events_url":"https:\/\/api.github.com\/users\/JAugusto97\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JAugusto97\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-04T17:18:29Z","updated_at":"2022-02-07T03:23:24Z","closed_at":"2022-02-04T17:36:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3682","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3682","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3682.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3682.patch","merged_at":null},"body":"Hey, \r\n\r\nI'm adding our dataset from our paper published at AACL 2020. 
Feel free to ask for modifications.\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3681","id":1124237458,"node_id":"PR_kwDODunzps4yFcpM","number":3681,"title":"Fix TestCommand to move dataset_infos instead of copying","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2022-02-04T14:01:52Z","updated_at":"2022-02-04T18:47:16Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3681","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3681","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3681.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3681.patch","merged_at":null},"body":"Why do we copy instead of moving the file?\r\n\r\nCC: @lhoestq @lvwerra ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681\/timeline","performed_via_github_app":null} 
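Editor's note on PR #3681 above: the question it raises ("why copy instead of moving the file?") comes down to whether the generated `dataset_infos.json` should also remain in its temporary location. A small self-contained illustration with throwaway files, not the `TestCommand`'s actual code:

```python
# Illustration only: copy leaves the source file in place, move removes it.
import pathlib
import shutil
import tempfile

tmp = pathlib.Path(tempfile.mkdtemp())
src, dst_dir = tmp / "dataset_infos.json", tmp / "local_dataset_dir"
dst_dir.mkdir()
src.write_text("{}")

shutil.copyfile(src, dst_dir / "copied_dataset_infos.json")  # copy: src still exists
print(src.exists())  # True
shutil.move(str(src), str(dst_dir / "dataset_infos.json"))   # move: src is gone
print(src.exists())  # False
```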
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3680","id":1124213416,"node_id":"PR_kwDODunzps4yFXm8","number":3680,"title":"Fix TestCommand to copy dataset_infos to local dir with only data files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-04T13:36:46Z","updated_at":"2022-02-08T10:32:55Z","closed_at":"2022-02-08T10:32:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3680","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3680","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3680.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3680.patch","merged_at":"2022-02-08T10:32:55Z"},"body":"Currently this case is missed.\r\n\r\nCC: @lvwerra ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3679","id":1124062133,"node_id":"I_kwDODunzps5C_9O1","number":3679,"title":"Download datasets from a private 
hub","user":{"login":"juliensimon","id":3436143,"node_id":"MDQ6VXNlcjM0MzYxNDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3436143?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/juliensimon","html_url":"https:\/\/github.com\/juliensimon","followers_url":"https:\/\/api.github.com\/users\/juliensimon\/followers","following_url":"https:\/\/api.github.com\/users\/juliensimon\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/juliensimon\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/juliensimon\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/juliensimon\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/juliensimon\/orgs","repos_url":"https:\/\/api.github.com\/users\/juliensimon\/repos","events_url":"https:\/\/api.github.com\/users\/juliensimon\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/juliensimon\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3814924348,"node_id":"LA_kwDODunzps7jYyA8","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/private-hub","name":"private-hub","color":"A929D8","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-04T10:49:06Z","updated_at":"2022-02-09T15:04:25Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In the context of a private hub deployment, customers would like to use load_dataset() to load datasets from their hub, not from the public hub. This doesn't seem to be configurable at the moment and it would be nice to add this feature.\r\n\r\nThe obvious workaround is to clone the repo first and then load it from local storage, but this adds an extra step. It'd be great to have the same experience regardless of where the hub is hosted.\r\n\r\nThe same issue exists with the transformers library and the CLI. 
I'm going to create issues there as well, and I'll reference them below.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3678","id":1123402426,"node_id":"PR_kwDODunzps4yCt91","number":3678,"title":"Add code example in wikipedia card","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-03T18:09:02Z","updated_at":"2022-02-04T13:21:39Z","closed_at":"2022-02-04T13:21:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3678","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3678","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3678.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3678.patch","merged_at":"2022-02-04T13:21:39Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3677","id":1123192866,"node_id":"I_kwDODunzps5C8pAi","number":3677,"title":"Discovery cannot be streamed 
anymore","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-02-03T15:02:03Z","updated_at":"2022-02-10T16:51:24Z","closed_at":"2022-02-10T16:51:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null
,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\niterable_dataset = load_dataset(\"discovery\", name=\"discovery\", split=\"train\", streaming=True)\r\nlist(iterable_dataset.take(1))\r\n```\r\n\r\n## Expected results\r\n\r\nThe first row of the train split.\r\n\r\n## Actual results\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/iterable_dataset.py\", line 365, in __iter__\r\n for key, example in self._iter():\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/iterable_dataset.py\", line 362, in _iter\r\n yield from ex_iterable\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/iterable_dataset.py\", line 272, in __iter__\r\n yield from islice(self.ex_iterable, self.n)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/iterable_dataset.py\", line 79, in __iter__\r\n yield from self.generate_examples_fn(**self.kwargs)\r\n File \"\/home\/slesage\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/discovery\/542fab7a9ddc1d9726160355f7baa06a1ccc44c40bc8e12c09e9bc743aca43a2\/discovery.py\", line 333, in _generate_examples\r\n with open(data_file, encoding=\"utf8\") as f:\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/streaming.py\", line 64, in wrapper\r\n return function(*args, use_auth_token=use_auth_token, **kwargs)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/utils\/streaming_download_manager.py\", line 369, in xopen\r\n file_obj = fsspec.open(file, mode=mode, *args, **kwargs).open()\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/core.py\", line 456, in open\r\n return open_files(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/core.py\", line 288, in open_files\r\n fs, fs_token, paths = get_fs_token_paths(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/core.py\", line 611, in get_fs_token_paths\r\n fs = filesystem(protocol, **inkwargs)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/registry.py\", line 253, in filesystem\r\n return cls(**storage_options)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/spec.py\", line 68, in __call__\r\n obj = super().__call__(*args, **kwargs)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/implementations\/zip.py\", line 57, in __init__\r\n self.zip = zipfile.ZipFile(self.fo)\r\n File \"\/home\/slesage\/.pyenv\/versions\/3.9.6\/lib\/python3.9\/zipfile.py\", line 1257, in __init__\r\n self._RealGetContents()\r\n File \"\/home\/slesage\/.pyenv\/versions\/3.9.6\/lib\/python3.9\/zipfile.py\", line 1320, in _RealGetContents\r\n endrec = _EndRecData(fp)\r\n File \"\/home\/slesage\/.pyenv\/versions\/3.9.6\/lib\/python3.9\/zipfile.py\", line 263, in _EndRecData\r\n fpin.seek(0, 2)\r\n File 
\"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/implementations\/http.py\", line 676, in seek\r\n raise ValueError(\"Cannot seek streaming HTTP file\")\r\nValueError: Cannot seek streaming HTTP file\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.3\r\n- Platform: Linux-5.11.0-1027-aws-x86_64-with-glibc2.31\r\n- Python version: 3.9.6\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3676","id":1123096362,"node_id":"I_kwDODunzps5C8Rcq","number":3676,"title":"`None` replaced by `[]` after first batch in map","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.g
ithub.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-02-03T13:36:48Z","updated_at":"2022-02-03T16:30:52Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Sometimes `None` can be replaced by `[]` when running map:\r\n\r\n```python\r\nfrom datasets import Dataset\r\n\r\nds = Dataset.from_dict({\"a\": range(4)})\r\nds = ds.map(lambda x: {\"b\": [[None, [0]]]}, batched=True, batch_size=1, remove_columns=[\"a\"])\r\nprint(ds.to_pandas())\r\n# b\r\n# 0 [None, [0]]\r\n# 1 [[], [0]]\r\n# 2 [[], [0]]\r\n# 3 [[], [0]]\r\n```\r\n\r\nThis issue has been experienced when running the `run_qa.py` example from `transformers` (see issue https:\/\/github.com\/huggingface\/transformers\/issues\/15401)\r\n\r\nThis can be due to a bug in when casting `None` in nested lists. Casting only happens after the first batch, since the first batch is used to infer the feature types.\r\n\r\ncc @sgugger ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3675","id":1123078408,"node_id":"I_kwDODunzps5C8NEI","number":3675,"title":"Add CodeContests 
dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-02-03T13:20:00Z","updated_at":"2022-02-10T20:50:38Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** CodeContests\r\n- **Description:** CodeContests is a competitive programming dataset for machine-learning.\r\n- **Paper:**\r\n- **Data:** https:\/\/github.com\/deepmind\/code_contests\r\n- **Motivation:** This dataset was used when training [AlphaCode](https:\/\/deepmind.com\/blog\/article\/Competitive-programming-with-AlphaCode).\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3674","id":1123027874,"node_id":"PR_kwDODunzps4yBe17","number":3674,"title":"Add FrugalScore 
metric","user":{"login":"moussaKam","id":28675016,"node_id":"MDQ6VXNlcjI4Njc1MDE2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28675016?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/moussaKam","html_url":"https:\/\/github.com\/moussaKam","followers_url":"https:\/\/api.github.com\/users\/moussaKam\/followers","following_url":"https:\/\/api.github.com\/users\/moussaKam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/moussaKam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/moussaKam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/moussaKam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/moussaKam\/orgs","repos_url":"https:\/\/api.github.com\/users\/moussaKam\/repos","events_url":"https:\/\/api.github.com\/users\/moussaKam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/moussaKam\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-03T12:28:52Z","updated_at":"2022-02-08T15:28:56Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3674","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3674","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3674.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3674.patch","merged_at":null},"body":"This pull request add FrugalScore metric for NLG systems evaluation.\r\n\r\nFrugalScore is a reference-based metric for NLG models evaluation. It is based on a distillation approach that allows to learn a fixed, low cost version of any expensive NLG metric, while retaining most of its original performance.\r\n\r\nPaper: https:\/\/arxiv.org\/abs\/2110.08559?context=cs\r\nGithub: https:\/\/github.com\/moussaKam\/FrugalScore\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3673","id":1123010520,"node_id":"I_kwDODunzps5C78fY","number":3673,"title":"`load_dataset(\"snli\")` is different from dataset 
viewer","user":{"login":"pietrolesci","id":61748653,"node_id":"MDQ6VXNlcjYxNzQ4NjUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/61748653?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pietrolesci","html_url":"https:\/\/github.com\/pietrolesci","followers_url":"https:\/\/api.github.com\/users\/pietrolesci\/followers","following_url":"https:\/\/api.github.com\/users\/pietrolesci\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pietrolesci\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pietrolesci\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pietrolesci\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pietrolesci\/orgs","repos_url":"https:\/\/api.github.com\/users\/pietrolesci\/repos","events_url":"https:\/\/api.github.com\/users\/pietrolesci\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pietrolesci\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"assignees":[{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":9,"created_at":"2022-02-03T12:10:43Z","updated_at":"2022-02-11T17:01:21Z","clos
ed_at":"2022-02-11T17:01:21Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe dataset that is downloaded from the Hub via `load_dataset(\"snli\")` is different from what is available in the dataset viewer. In the viewer the labels are not encoded (i.e., \"neutral\", \"entailment\", \"contradiction\"), while the downloaded dataset shows the encoded labels (i.e., 0, 1, 2).\r\n\r\nIs this expected? \r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform: Ubuntu 20.4\r\n- Python version: 3.7\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3672","id":1122980556,"node_id":"PR_kwDODunzps4yBUrZ","number":3672,"title":"Prioritize `module.builder_kwargs` over defaults in `TestCommand`","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-03T11:38:42Z","updated_at":"2022-02-04T12:37:20Z","closed_at":"2022-02-04T12:37:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3672","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3672","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3672.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3672.patch","merged_at":"2022-02-04T12:37:19Z"},"body":"This fixes a bug in the `TestCommand` where multiple kwargs for `name` were passed if it was set in both default and `module.builder_kwargs`. 
Example error:\r\n\r\n```Python\r\nTraceback (most recent call last):\r\n File \"create_metadata.py\", line 96, in \r\n main(**vars(args))\r\n File \"create_metadata.py\", line 86, in main\r\n metadata_command.run()\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/commands\/test.py\", line 144, in run\r\n for j, builder in enumerate(get_builders()):\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/commands\/test.py\", line 141, in get_builders\r\n name=name, cache_dir=self._cache_dir, data_dir=self._data_dir, **module.builder_kwargs\r\nTypeError: type object got multiple values for keyword argument 'name'\r\n```\r\n\r\nLet me know what you think.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3671","id":1122864253,"node_id":"I_kwDODunzps5C7Yx9","number":3671,"title":"Give an estimate of the dataset size in DatasetInfo","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-03T09:47:10Z","updated_at":"2022-02-03T09:47:10Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nCurrently, only part of the datasets provide `dataset_size`, `download_size`, `size_in_bytes` (and `num_bytes` and `num_examples` inside `splits`). 
I would want to get this information, or an estimation, for all the datasets.\r\n\r\n**Describe the solution you'd like**\r\n\r\n- get access to the git information for the dataset files hosted on the hub\r\n- look at the [`Content-Length`](https:\/\/developer.mozilla.org\/en-US\/docs\/Web\/HTTP\/Headers\/Content-Length) for the files served by HTTP\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3670","id":1122439827,"node_id":"PR_kwDODunzps4x_kBx","number":3670,"title":"feat: \ud83c\udfb8 generate info if dataset_infos.json does not exist","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-02T22:11:56Z","updated_at":"2022-02-11T20:24:35Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3670","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3670","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3670.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3670.patch","merged_at":null},"body":"in get_dataset_infos(). 
Also: add the `use_auth_token` parameter, and create get_dataset_config_info()\r\n\r\n\u2705 Closes: #3013","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3669","id":1122335622,"node_id":"PR_kwDODunzps4x_OTI","number":3669,"title":"Common voice validated partition","user":{"login":"shalymin-amzn","id":98762373,"node_id":"U_kgDOBeL-hQ","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/98762373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shalymin-amzn","html_url":"https:\/\/github.com\/shalymin-amzn","followers_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/followers","following_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/orgs","repos_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/repos","events_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2022-02-02T20:04:43Z","updated_at":"2022-02-08T17:26:52Z","closed_at":"2022-02-08T17:23:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3669","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3669","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3669.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3669.patch","merged_at":"2022-02-08T17:23:12Z"},"body":"This patch adds access to the 'validated' partitions of CommonVoice datasets (provided by the dataset creators but not available in the HuggingFace interface yet).\r\nAs 'validated' contains significantly more data than 'train' (although it contains both test and validation, so one needs to be careful there), it can be useful to train better models where no strict comparison with the previous work is intended.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669\/timeline","performed_via_github_app":null} 
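A hedged usage sketch for the 'validated' partition added in #3669 above; the dataset id, the language config ("tt") and the split string are assumptions based on the PR description, not confirmed by it.

```python
from datasets import load_dataset

# Assumed usage once the PR is merged: load the larger "validated" partition
# of a Common Voice language (here Tatar, "tt") instead of "train".
# Per the PR text, "validated" overlaps with the test and validation splits,
# so it should not be mixed with them when strict comparisons are intended.
validated = load_dataset("common_voice", "tt", split="validated")
print(validated)
```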
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3668","id":1122261736,"node_id":"I_kwDODunzps5C5Fro","number":3668,"title":" Couldn't cast array of type string error with cast_column","user":{"login":"R4ZZ3","id":25264037,"node_id":"MDQ6VXNlcjI1MjY0MDM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25264037?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/R4ZZ3","html_url":"https:\/\/github.com\/R4ZZ3","followers_url":"https:\/\/api.github.com\/users\/R4ZZ3\/followers","following_url":"https:\/\/api.github.com\/users\/R4ZZ3\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/R4ZZ3\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/R4ZZ3\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/R4ZZ3\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/R4ZZ3\/orgs","repos_url":"https:\/\/api.github.com\/users\/R4ZZ3\/repos","events_url":"https:\/\/api.github.com\/users\/R4ZZ3\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/R4ZZ3\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-02T18:33:29Z","updated_at":"2022-02-09T07:07:42Z","closed_at":"2022-02-09T07:07:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\n\r\nIn OVH cloud during Huggingface Robust-speech-recognition event on a AI training notebook instance using jupyter lab and running jupyter notebook When using the dataset.cast_column(\"audio\",Audio(sampling_rate=16_000))\r\nmethod I get error\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152214027-9c42a71a-dd24-463c-a346-57e0287e5a8f.png)\r\n\r\nThis was working with datasets version 1.17.1.dev0\r\nbut now with version 1.18.3 produces the error above.\r\n\r\n## Steps to reproduce the bug\r\n\r\nload dataset:\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152216145-159553b6-cddc-4f0b-8607-7e76b600e22a.png)\r\n\r\n\r\nremove columns:\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152214707-7c7e89d1-87d8-4b4f-8cfc-5d7223d35644.png)\r\n\r\nrun my fix_path function.\r\nThis also creates the audio column that is referring to the absolute file path of the audio\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152214773-51f71ccf-d31b-4449-b63a-1af56436e49f.png)\r\n\r\nThen I concatenate few other datasets and finally try the cast_column method\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152215032-f341ec86-9d6d-48c9-943b-e2efe37a4d98.png)\r\n\r\nbut get error:\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152215073-b85bd057-98e8-413c-9b05-51e9805f2c24.png)\r\n\r\n\r\n\r\n\r\n## 
Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.3\r\n- Platform: \r\nOVH Cloud, AI Training section, container for Huggingface Robust Speech Recognition event image(baaastijn\/ovh_huggingface)\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152215161-b4ff7bfb-2736-4afb-9223-761a3338d23c.png)\r\n\r\n- Python version: 3.8.8\r\n- PyArrow version:\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152215936-4d365760-557e-456b-b5eb-ad1d15cf5073.png)\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3667","id":1122060630,"node_id":"PR_kwDODunzps4x-Ujt","number":3667,"title":"Process .opus files with torchaudio","user":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/event
s{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"assignees":[{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2022-02-02T15:23:14Z","updated_at":"2022-02-04T15:29:38Z","closed_at":"2022-02-04T15:29:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3667","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3667","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3667.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3667.patch","merged_at":null},"body":"@anton-l suggested to proccess .opus files with `torchaudio` instead of `soundfile` as it's faster: \r\n![opus](https:\/\/user-images.githubusercontent.com\/16348744\/152177816-2df6076c-f28b-4aef-a08d-b499b921414d.png)\r\n\r\n(moreover, I didn't manage to load .opus files with `soundfile` \/ `librosa` locally on any my machine anyway for some reason, even with `ffmpeg` installed).\r\n\r\nFor now my current changes work with locally stored file:\r\n```python\r\n# download sample opus file (from MultilingualSpokenWords dataset)\r\n!wget https:\/\/huggingface.co\/datasets\/polinaeterna\/test_opus\/resolve\/main\/common_voice_tt_17737010.opus \r\n\r\nfrom datasets import Dataset, Audio\r\n\r\naudio_path = \"common_voice_tt_17737010.opus\"\r\ndataset = Dataset.from_dict({\"audio\": [audio_path]}).cast_column(\"audio\", Audio(48000))\r\ndataset[0]\r\n# {'audio': {'path': 'common_voice_tt_17737010.opus',\r\n# 'array': array([ 0.0000000e+00, 0.0000000e+00, 3.0517578e-05, ...,\r\n# -6.1035156e-05, 6.1035156e-05, 0.0000000e+00], dtype=float32),\r\n# 'sampling_rate': 48000}}\r\n```\r\nBut it doesn't work when loading inside s dataset from bytes (I checked on [MultilingualSpokenWords](https:\/\/github.com\/huggingface\/datasets\/pull\/3666), the PR is a draft now, maybe the bug is somewhere there )\r\n\r\n```python\r\nimport torchaudio\r\nwith open(audio_path, \"rb\") as b:\r\n print(torchaudio.load(b))\r\n# RuntimeError: Error loading audio file: failed to open file \r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3666","id":1122058894,"node_id":"PR_kwDODunzps4x-ULz","number":3666,"title":"Multilingual Spoken Words","user":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-02-02T15:21:48Z","updated_at":"2022-02-11T17:30:28Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3666","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3666","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3666.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3666.patch","merged_at":null},"body":"Add [Multillingual Spoken Words dataset](https:\/\/mlcommons.org\/en\/multilingual-spoken-words\/)\r\n\r\nYou can specify multiple languages for downloading \ud83d\ude0c:\r\n```python\r\nds = load_dataset(\"datasets\/ml_spoken_words\", languages=[\"ar\", \"tt\"])\r\n```\r\n\r\n1. I didn't take into account that each time you pass a set of languages the data for a specific language is downloaded even if it was downloaded before (since these are custom configs like `ar+tt` and `ar+tt+br`. Maybe that wasn't a good idea?\r\n2. The script will have to be slightly changed after merge of https:\/\/github.com\/huggingface\/datasets\/pull\/3664 \r\n2. Just can't figure out what wrong with dummy files... 
\ud83d\ude1e Maybe we should get rid of them at some point \ud83d\ude01","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3665","id":1121753385,"node_id":"PR_kwDODunzps4x9TnU","number":3665,"title":"Fix MP3 resampling when a dataset's audio files have different sampling rates","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-02T10:31:45Z","updated_at":"2022-02-02T10:52:26Z","closed_at":"2022-02-02T10:52:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3665","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3665","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3665.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3665.patch","merged_at":"2022-02-02T10:52:25Z"},"body":"The resampler needs to be updated if the `orig_freq` doesn't match the audio file sampling rate\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/3662","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665\/timeline","performed_via_github_app":null} 
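To make the resampling fix described in #3665 above concrete, a small sketch assuming torchaudio's `Resample` transform (not the actual `datasets` internals): the cached resampler is rebuilt whenever a file's true sampling rate differs from the `orig_freq` it was created with.

```python
import torch
import torchaudio


class CachedResampler:
    """Toy illustration of the fix discussed above: reuse a Resample transform
    only while orig_freq matches the current file's sampling rate."""

    def __init__(self, target_freq: int = 16_000):
        self.target_freq = target_freq
        self._resampler = None
        self._orig_freq = None

    def __call__(self, waveform: torch.Tensor, orig_freq: int) -> torch.Tensor:
        if self._resampler is None or self._orig_freq != orig_freq:
            # Rebuild when the sampling rate changes between files,
            # instead of silently resampling with a stale orig_freq.
            self._resampler = torchaudio.transforms.Resample(orig_freq, self.target_freq)
            self._orig_freq = orig_freq
        return self._resampler(waveform)
```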
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3664","id":1121233301,"node_id":"PR_kwDODunzps4x7mg_","number":3664,"title":"[WIP] Return local paths to Common Voice","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":16,"created_at":"2022-02-01T21:48:27Z","updated_at":"2022-02-11T23:32:08Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3664","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3664","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3664.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3664.patch","merged_at":null},"body":"Fixes https:\/\/github.com\/huggingface\/datasets\/issues\/3663\r\n\r\nThis is a proposed way of returning the old local file-based generator while keeping the new streaming generator intact.\r\n\r\nTODO:\r\n- [ ] brainstorm a bit more on https:\/\/github.com\/huggingface\/datasets\/issues\/3663 to see if we can do better\r\n- [ ] refactor the heck out of this PR to avoid completely copying the logic between the two generators","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3663","id":1121067647,"node_id":"I_kwDODunzps5C0iJ_","number":3663,"title":"[Audio] Path of Common Voice 
cannot be used for audio loading anymore","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlc
jE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2022-02-01T18:40:10Z","updated_at":"2022-02-08T16:05:18Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nfrom torchaudio import load\r\n\r\nds = load_dataset(\"common_voice\", \"ab\", split=\"train\")\r\n\r\n# both of the following commands fail at the moment\r\nload(ds[0][\"audio\"][\"path\"])\r\nload(ds[0][\"path\"])\r\n```\r\n\r\n## Expected results\r\n\r\nThe path should be the complete absolute path to the 
downloaded audio file not some relative path.\r\n\r\n\r\n## Actual results\r\n\r\n```bash\r\n~\/hugging_face\/venv_3.9\/lib\/python3.9\/site-packages\/torchaudio\/backend\/sox_io_backend.py in load(filepath, frame_offset, num_frames, normalize, channels_first, format)\r\n 150 filepath, frame_offset, num_frames, normalize, channels_first, format)\r\n 151 filepath = os.fspath(filepath)\r\n--> 152 return torch.ops.torchaudio.sox_io_load_audio_file(\r\n 153 filepath, frame_offset, num_frames, normalize, channels_first, format)\r\n 154\r\n\r\nRuntimeError: Error loading audio file: failed to open file cv-corpus-6.1-2020-12-11\/ab\/clips\/common_voice_ab_19904194.mp3\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.3.dev0\r\n- Platform: Linux-5.4.0-96-generic-x86_64-with-glibc2.27\r\n- Python version: 3.9.1\r\n- PyArrow version: 3.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3662","id":1121024403,"node_id":"I_kwDODunzps5C0XmT","number":3662,"title":"[Audio] MP3 resampling is incorrect when dataset's audio files have different sampling rates","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2022-02-01T17:55:04Z","updated_at":"2022-02-02T10:52:25Z","closed_at":"2022-02-02T10:52:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The Audio feature resampler for MP3 gets stuck with the first original frequencies it meets, which leads to subsequent decoding to be incorrect.\r\n\r\nHere is a code to reproduce the issue:\r\n\r\nLet's first consider two audio files with different sampling rates 32000 and 16000:\r\n```python\r\n# first download a mp3 file with sampling_rate=32000\r\n!wget 
https:\/\/file-examples-com.github.io\/uploads\/2017\/11\/file_example_MP3_700KB.mp3\r\n\r\nimport torchaudio\r\n\r\naudio_path = \"file_example_MP3_700KB.mp3\"\r\naudio_path2 = audio_path.replace(\".mp3\", \"_resampled.mp3\")\r\nresample = torchaudio.transforms.Resample(32000, 16000) # create a new file with sampling_rate=16000\r\ntorchaudio.save(audio_path2, resample(torchaudio.load(audio_path)[0]), 16000)\r\n```\r\n\r\nThen we can see an issue here when decoding:\r\n```python\r\nfrom datasets import Dataset, Audio\r\n\r\ndataset = Dataset.from_dict({\"audio\": [audio_path, audio_path2]}).cast_column(\"audio\", Audio(48000))\r\ndataset[0] # decode the first audio file sets the resampler orig_freq to 32000\r\nprint(dataset .features[\"audio\"]._resampler.orig_freq)\r\n# 32000\r\nprint(dataset[0][\"audio\"][\"array\"].shape) # here decoding is fine\r\n# (1308096,)\r\n\r\ndataset = Dataset.from_dict({\"audio\": [audio_path, audio_path2]}).cast_column(\"audio\", Audio(48000))\r\ndataset[1] # decode the second audio file sets the resampler orig_freq to 16000\r\nprint(dataset .features[\"audio\"]._resampler.orig_freq)\r\n# 16000\r\nprint(dataset[0][\"audio\"][\"array\"].shape) # here decoding uses orig_freq=16000 instead of 32000\r\n# (2616192,)\r\n```\r\n\r\nThe value of `orig_freq` doesn't change no matter what file needs to be decoded\r\n\r\ncc @patrickvonplaten @anton-l @cahya-wirawan @albertvillanova \r\n\r\nThe issue seems to be here in `Audio.decode_mp3`:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/4c417d52def6e20359ca16c6723e0a2855e5c3fd\/src\/datasets\/features\/audio.py#L176-L180","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3661","id":1121000251,"node_id":"PR_kwDODunzps4x61ad","number":3661,"title":"Remove unnecessary 'r' arg 
in","user":{"login":"bryant1410","id":3905501,"node_id":"MDQ6VXNlcjM5MDU1MDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3905501?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bryant1410","html_url":"https:\/\/github.com\/bryant1410","followers_url":"https:\/\/api.github.com\/users\/bryant1410\/followers","following_url":"https:\/\/api.github.com\/users\/bryant1410\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bryant1410\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bryant1410\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bryant1410\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bryant1410\/orgs","repos_url":"https:\/\/api.github.com\/users\/bryant1410\/repos","events_url":"https:\/\/api.github.com\/users\/bryant1410\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bryant1410\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-02-01T17:29:27Z","updated_at":"2022-02-07T16:57:27Z","closed_at":"2022-02-07T16:02:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3661","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3661","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3661.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3661.patch","merged_at":"2022-02-07T16:02:42Z"},"body":"Originally from #3489","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3660","id":1120982671,"node_id":"PR_kwDODunzps4x6xr8","number":3660,"title":"Change HTTP links to 
HTTPS","user":{"login":"bryant1410","id":3905501,"node_id":"MDQ6VXNlcjM5MDU1MDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3905501?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bryant1410","html_url":"https:\/\/github.com\/bryant1410","followers_url":"https:\/\/api.github.com\/users\/bryant1410\/followers","following_url":"https:\/\/api.github.com\/users\/bryant1410\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bryant1410\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bryant1410\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bryant1410\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bryant1410\/orgs","repos_url":"https:\/\/api.github.com\/users\/bryant1410\/repos","events_url":"https:\/\/api.github.com\/users\/bryant1410\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bryant1410\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-01T17:12:51Z","updated_at":"2022-02-01T18:34:47Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3660","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3660","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3660.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3660.patch","merged_at":null},"body":"I tested the links. I also fixed some typos.\r\n\r\nOriginally from #3489","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3659","id":1120913672,"node_id":"I_kwDODunzps5Cz8kI","number":3659,"title":"push_to_hub but preview not 
working","user":{"login":"thomas-happify","id":66082334,"node_id":"MDQ6VXNlcjY2MDgyMzM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/66082334?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomas-happify","html_url":"https:\/\/github.com\/thomas-happify","followers_url":"https:\/\/api.github.com\/users\/thomas-happify\/followers","following_url":"https:\/\/api.github.com\/users\/thomas-happify\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomas-happify\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomas-happify\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomas-happify\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomas-happify\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomas-happify\/repos","events_url":"https:\/\/api.github.com\/users\/thomas-happify\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomas-happify\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-02-01T16:23:57Z","upd
ated_at":"2022-02-09T08:00:37Z","closed_at":"2022-02-09T08:00:37Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*happifyhealth\/twitter_pnn*'\r\n\r\n**Link:** *[link to the dataset viewer page](https:\/\/huggingface.co\/datasets\/happifyhealth\/twitter_pnn)*\r\n\r\nI used \r\n```\r\ndataset.push_to_hub(\"happifyhealth\/twitter_pnn\")\r\n```\r\nbut the preview is not working.\r\n\r\nAm I the one who added this dataset ? Yes\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3658","id":1120880395,"node_id":"I_kwDODunzps5Cz0cL","number":3658,"title":"Dataset viewer issue for *P3*","user":{"login":"jeffistyping","id":22351555,"node_id":"MDQ6VXNlcjIyMzUxNTU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22351555?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jeffistyping","html_url":"https:\/\/github.com\/jeffistyping","followers_url":"https:\/\/api.github.com\/users\/jeffistyping\/followers","following_url":"https:\/\/api.github.com\/users\/jeffistyping\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jeffistyping\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jeffistyping\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jeffistyping\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jeffistyping\/orgs","repos_url":"https:\/\/api.github.com\/users\/jeffistyping\/repos","events_url":"https:\/\/api.github.com\/users\/jeffistyping\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jeffistyping\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-01T15:57:56Z","updated_at":"2022-02-01T15:57:56Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*P3*'\r\n\r\n**Link: https:\/\/huggingface.co\/datasets\/bigscience\/P3**\r\n\r\n```\r\nStatus code: 400\r\nException: SplitsNotFoundError\r\nMessage: The split names could not be parsed from the dataset config.\r\n```\r\nAm I the one who added this dataset ? 
No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3657","id":1120602620,"node_id":"PR_kwDODunzps4x5f1I","number":3657,"title":"Extend dataset builder for streaming in `get_dataset_split_names`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-02-01T12:21:24Z","updated_at":"2022-02-03T22:49:06Z","closed_at":"2022-02-02T11:22:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3657","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3657","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3657.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3657.patch","merged_at":"2022-02-02T11:22:01Z"},"body":"Currently, `get_dataset_split_names` doesn't extend a builder module to support streaming, even though it uses `StreamingDownloadManager` to download data. 
This PR fixes that.\r\n\r\nTo test the change, run the following:\r\n```bash\r\npip install git+https:\/\/github.com\/huggingface\/datasets.git@fix-get_dataset_split_names-streaming\r\npython -c \"from datasets import get_dataset_split_names; print(get_dataset_split_names('facebook\/multilingual_librispeech', 'german', download_mode='force_redownload', revision='137923f945552c6afdd8b60e4a7b43e3088972c1'))\"\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3656","id":1120510823,"node_id":"I_kwDODunzps5CyaNn","number":3656,"title":"checksum error subjqa dataset","user":{"login":"RensDimmendaal","id":9828683,"node_id":"MDQ6VXNlcjk4Mjg2ODM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9828683?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RensDimmendaal","html_url":"https:\/\/github.com\/RensDimmendaal","followers_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/followers","following_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/orgs","repos_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/repos","events_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-02-01T10:53:33Z","updated_at":"2022-02-10T10:56:59Z","closed_at":"2022-02-10T10:56:38Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nI get a checksum error when loading the `subjqa` dataset (used in the transformers book).\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nsubjqa = load_dataset(\"subjqa\",\"electronics\")\r\n```\r\n\r\n## Expected results\r\nLoading the dataset\r\n\r\n## Actual results\r\n\r\n```\r\n---------------------------------------------------------------------------\r\n\r\nNonMatchingChecksumError Traceback (most recent call last)\r\n\r\n in ()\r\n 2 from datasets import load_dataset\r\n 3 \r\n----> 4 subjqa = load_dataset(\"subjqa\",\"electronics\")\r\n\r\n3 frames\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 38 if len(bad_urls) > 0:\r\n 39 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 40 raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 41 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 42 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source 
files:\r\n['https:\/\/github.com\/lewtun\/SubjQA\/archive\/refs\/heads\/master.zip']\r\n```\r\n\r\n## Environment info\r\n\r\nGoogle colab\r\n\r\n- `datasets` version: 1.18.2\r\n- Platform: Linux-5.4.144+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 3.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3655","id":1119801077,"node_id":"I_kwDODunzps5Cvs71","number":3655,"title":"Pubmed dataset not reachable","user":{"login":"abhi-mosaic","id":77638579,"node_id":"MDQ6VXNlcjc3NjM4NTc5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/77638579?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhi-mosaic","html_url":"https:\/\/github.com\/abhi-mosaic","followers_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/followers","following_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/repos","events_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-01-31T18:45:47Z","updated_at":"2022-02-11T15:54:06Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nTrying to use the `pubmed` dataset fails to reach \/ download the source files.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\npubmed_train = datasets.load_dataset('pubmed', split='train')\r\n```\r\n\r\n## Expected results\r\nShould begin downloading the pubmed dataset.\r\n\r\n## Actual results\r\n```\r\nConnectionError: Couldn't reach ftp:\/\/ftp.ncbi.nlm.nih.gov\/pubmed\/baseline\/pubmed21n0865.xml.gz (InvalidSchema(\"No connection adapters were found for 'ftp:\/\/ftp.ncbi.nlm.nih.gov\/pubmed\/baseline\/pubmed21n0865.xml.gz'\"))\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.18.2\r\n- Platform: macOS-11.4-x86_64-i386-64bit\r\n- Python version: 3.8.2\r\n- PyArrow version: 6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3654","id":1119717475,"node_id":"PR_kwDODunzps4x2kiX","number":3654,"title":"Better TQDM output","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-31T17:22:43Z","updated_at":"2022-02-03T15:55:34Z","closed_at":"2022-02-03T15:55:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3654","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3654","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3654.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3654.patch","merged_at":"2022-02-03T15:55:33Z"},"body":"This PR does the following:\r\n* if `dataset_infos.json` exists for a dataset, uses `num_examples` to print the total number of examples that needs to be generated (in `builder.py`)\r\n* fixes `tqdm` + multiprocessing in Jupyter Notebook\/Colab (the issue stems from this commit in the `tqdm` repo: https:\/\/github.com\/tqdm\/tqdm\/commit\/f7722edecc3010cb35cc1c923ac4850a76336f82) \r\n* adds the missing `drop_last_batch` and `with_ranks` params to `DatasetDict.map` \r\n* correctly computes the number of iterations in `map` and the CSV\/JSON loader when `batched=True` to fix `tqdm` progress bars\r\n* removes the `bool(logging.get_verbosity() == logging.NOTSET)` (or simplifies `bool(logging.get_verbosity() == logging.NOTSET) or not utils.is_progress_bar_enabled()` to `not utils.is_progress_bar_enabled()`) condition and uses `utils.is_progress_bar_enabled` to check if `tqdm` output is enabled (this comment from @stas00 explains why the `bool(logging.get_verbosity() == logging.NOTSET)` check is problematic: https:\/\/github.com\/huggingface\/transformers\/issues\/14889#issue-1087318463)\r\n\r\nFix 
#2630","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3653","id":1119186952,"node_id":"I_kwDODunzps5CtXAI","number":3653,"title":"`to_json` in multiprocessing fashion sometimes deadlock","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-31T09:35:07Z","updated_at":"2022-01-31T09:35:07Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\n`to_json` in multiprocessing fashion sometimes deadlock, instead of raising exceptions. Temporary solution is to see that it deadlocks, and then reduce the number of processes or batch size in order to reduce the memory footprint.\r\n\r\nAs @lhoestq pointed out, this might be related to https:\/\/bugs.python.org\/issue22393#msg315684 where `multiprocessing` fails to raise the OOM exception. 
One suggested alternative is to use `concurrent.futures` instead.\r\n\r\n## Steps to reproduce the bug\r\n\r\n## Expected results\r\n\r\nThe script fails when one worker hits OOM and raises an appropriate error.\r\n\r\n## Actual results\r\n\r\nDeadlock\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.1\r\n- Platform: Linux\r\n- Python version: 3.8\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3652","id":1118808738,"node_id":"PR_kwDODunzps4xzinr","number":3652,"title":"sp. Columbia => Colombia","user":{"login":"serapio","id":3781280,"node_id":"MDQ6VXNlcjM3ODEyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3781280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/serapio","html_url":"https:\/\/github.com\/serapio","followers_url":"https:\/\/api.github.com\/users\/serapio\/followers","following_url":"https:\/\/api.github.com\/users\/serapio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/serapio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/serapio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/serapio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/serapio\/orgs","repos_url":"https:\/\/api.github.com\/users\/serapio\/repos","events_url":"https:\/\/api.github.com\/users\/serapio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/serapio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-31T00:41:03Z","updated_at":"2022-02-09T16:55:25Z","closed_at":"2022-01-31T08:29:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3652","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3652","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3652.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3652.patch","merged_at":"2022-01-31T08:29:07Z"},"body":"\"Columbia\" refers to various places in North America. 
The country is \"Colombia\".","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3651","id":1118597647,"node_id":"PR_kwDODunzps4xy3De","number":3651,"title":"Update link in wiki_bio dataset","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-30T16:28:54Z","updated_at":"2022-01-31T14:50:48Z","closed_at":"2022-01-31T08:38:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3651","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3651","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3651.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3651.patch","merged_at":"2022-01-31T08:38:09Z"},"body":"Fixes #3580 and makes the wiki_bio dataset work again. I changed the link and some documentation, and all the tests pass. 
Thanks @lhoestq for uploading the dataset to the HuggingFace data bucket.\r\n\r\n@lhoestq -- all the tests pass, but I'm still not able to import the dataset, as the old Google Drive link is cached somewhere:\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> load_dataset(\"wiki_bio\")\r\nUsing custom data configuration default\r\nDownloading and preparing dataset wiki_bio\/default (download: 318.53 MiB, generated: 736.94 MiB, post-processed: Unknown size, total: 1.03 GiB) to \/home\/jxm3\/.cache\/huggingface\/datasets\/wiki_bio\/default\/1.1.0\/5293ce565954ba965dada626f1e79684e98172d950371d266bf3caaf87e911c9...\r\nTraceback (most recent call last):\r\n ...\r\n File \"\/home\/jxm3\/random\/datasets\/src\/datasets\/utils\/file_utils.py\", line 612, in get_from_cache\r\n raise FileNotFoundError(f\"Couldn't find file at {url}\")\r\nFileNotFoundError: Couldn't find file at https:\/\/drive.google.com\/uc?export=download&id=1L7aoUXzHPzyzQ0ns4ApBbYepsjFOtXil\r\n```\r\n\r\nWhat do I have to do to invalidate the cache and actually import the dataset? It's clearly set up correctly, since the data is downloaded and processed by the tests.\r\n\r\nAs an aside, this caching-loading-scripts behavior makes for a really bad developer experience. I just wasted an hour trying to figure out where the caching was happening and how to disable it, and I don't know. All I wanted to do was update the link and submit a pull request! I recommend that you all either change this behavior (i.e. updating the link to a dataset should \"just work\") or document it, since I couldn't find any information about this in the contributing.md or readme or anywhere else! Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3650","id":1118537429,"node_id":"PR_kwDODunzps4xyr2o","number":3650,"title":"Allow 'to_json' to run in unordered fashion in order to lower memory 
footprint","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-01-30T13:23:19Z","updated_at":"2022-02-01T17:49:21Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3650","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3650","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3650.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3650.patch","merged_at":null},"body":"I'm using `to_json(..., num_proc=num_proc, compressiong='gzip')` with `num_proc>1`. I'm having an issue where things seem to deadlock at some point. Eventually I see OOM. I'm guessing it's an issue where one process starts to take a long time for a specific batch, and so other process keep accumulating their results in memory.\r\n\r\nIn order to flush memory, I propose we use optional `imap_unordered`. This will prevent one process to block the other ones. 
The reasoning is that indices are rarely relevant, and if one wants to keep an index, one can still create another column and reconstruct it from there.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3649","id":1117502250,"node_id":"I_kwDODunzps5Cm7sq","number":3649,"title":"Add IGLUE dataset","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608944167,"node_id":"LA_kwDODunzps7XHB4n","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/multimodal","name":"multimodal","color":"19E633","default":false,"description":"Multimodal datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-28T14:59:41Z","updated_at":"2022-01-28T15:02:35Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** IGLUE\r\n- **Description:** IGLUE brings together 4 vision-and-language tasks across 20 languages (Twitter [thread](https:\/\/twitter.com\/ebugliarello\/status\/1487045497583976455?s=20&t=SB4LZGDhhkUW83ugcX_m5w))\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2201.11732\r\n- **Data:** https:\/\/github.com\/e-bug\/iglue\r\n- **Motivation:** This dataset would provide a nice example of combining the text and image features of `datasets` together for multimodal applications.\r\n\r\nNote: the data \/ code are not yet visible on the GitHub repo, so I've pinged the authors for more information.\r\n\r\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3648","id":1117465505,"node_id":"PR_kwDODunzps4xvXig","number":3648,"title":"Fix Windows CI: bump python to 3.7","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-28T14:24:54Z","updated_at":"2022-01-28T14:40:39Z","closed_at":"2022-01-28T14:40:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3648","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3648","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3648.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3648.patch","merged_at":"2022-01-28T14:40:39Z"},"body":"Python>=3.7 is needed to install `tokenizers` 0.11","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648\/timeline","performed_via_github_app":null} 
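An aside on the `to_json(..., num_proc=...)` proposal in pull request 3650 above: the snippet below is a minimal, hypothetical sketch (plain `multiprocessing`, not the actual `datasets` implementation) of why `imap_unordered` keeps memory lower than ordered `imap`: results from fast workers can be consumed and flushed as soon as they finish instead of queuing up behind a slow batch.

```python
import time
from multiprocessing import Pool

def process_batch(i):
    # Batch 0 is pathologically slow; the others finish almost immediately.
    time.sleep(3 if i == 0 else 0.1)
    return i

if __name__ == "__main__":
    with Pool(4) as pool:
        # With pool.imap(...), nothing is yielded until batch 0 completes, so
        # already-finished results pile up in memory. With imap_unordered, each
        # result is yielded (and could be written to disk) as soon as it is ready.
        for i in pool.imap_unordered(process_batch, range(8)):
            print("finished batch", i)
```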
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3647","id":1117383675,"node_id":"PR_kwDODunzps4xvGDQ","number":3647,"title":"Fix `add_column` on datasets with indices mapping","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-28T13:06:29Z","updated_at":"2022-01-28T15:35:58Z","closed_at":"2022-01-28T15:35:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3647","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3647","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3647.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3647.patch","merged_at":"2022-01-28T15:35:57Z"},"body":"My initial idea was to avoid the `flatten_indices` call and reorder a new column instead, but in the end I decided to follow `concatenate_datasets` and use `flatten_indices` to avoid padding when `dataset._indices.num_rows != dataset._data.num_rows`.\r\n\r\nFix #3599","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3646","id":1116544627,"node_id":"PR_kwDODunzps4xsX66","number":3646,"title":"Fix streaming datasets that are not reset 
correctly","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-27T17:21:02Z","updated_at":"2022-01-28T16:34:29Z","closed_at":"2022-01-28T16:34:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3646","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3646","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3646.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3646.patch","merged_at":"2022-01-28T16:34:28Z"},"body":"Streaming datasets that use `StreamingDownloadManager.iter_archive` and `StreamingDownloadManager.iter_files` had some issues. Indeed if you try to iterate over such dataset twice, then the second time it will be empty.\r\n\r\nThis is because the two methods above are generator functions. 
I fixed this by making them return iterables that are reset properly instead.\r\n\r\nClose https:\/\/github.com\/huggingface\/datasets\/issues\/3645\r\ncc @anton-l ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3645","id":1116541298,"node_id":"I_kwDODunzps5CjRFy","number":3645,"title":"Streaming dataset based on dl_manager.iter_archive\/iter_files are not reset correctly","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gist
s_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-27T17:17:41Z","updated_at":"2022-01-28T16:34:28Z","closed_at":"2022-01-28T16:34:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi ! When iterating over a streaming dataset once, it's not reset correctly because of some issues with `dl_manager.iter_archive` and `dl_manager.iter_files`. Indeed they are generator functions (so the iterator that is returned can be exhausted). They should be iterables instead, and be reset if we do a for loop again:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nd = load_dataset(\"common_voice\", \"ab\", split=\"test\", streaming=True)\r\n\r\ni = 0\r\nfor i, _ in enumerate(d):\r\n pass\r\nprint(i) # 8\r\n# let's do it again\r\ni = 0\r\nfor i, _ in enumerate(d):\r\n pass\r\nprint(i) # 0\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3644","id":1116519670,"node_id":"I_kwDODunzps5CjLz2","number":3644,"title":"Add a GROUP BY 
operator","user":{"login":"felix-schneider","id":208336,"node_id":"MDQ6VXNlcjIwODMzNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/208336?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/felix-schneider","html_url":"https:\/\/github.com\/felix-schneider","followers_url":"https:\/\/api.github.com\/users\/felix-schneider\/followers","following_url":"https:\/\/api.github.com\/users\/felix-schneider\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/felix-schneider\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/felix-schneider\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/felix-schneider\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/felix-schneider\/orgs","repos_url":"https:\/\/api.github.com\/users\/felix-schneider\/repos","events_url":"https:\/\/api.github.com\/users\/felix-schneider\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/felix-schneider\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-27T16:57:54Z","updated_at":"2022-02-08T15:06:10Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nUsing batch mapping, we can easily split examples. However, we lack an appropriate option for merging them back together by some key. Consider this example:\r\n\r\n```python\r\n# features:\r\n# {\r\n# \"example_id\": datasets.Value(\"int32\"),\r\n# \"text\": datasets.Value(\"string\")\r\n# }\r\n\r\nds = datasets.Dataset()\r\n\r\n\r\ndef split(examples):\r\n sentences = [text.split(\".\") for text in examples[\"text\"]]\r\n return {\r\n \"example_id\": [\r\n example_id\r\n for example_id, sents in zip(examples[\"example_id\"], sentences)\r\n for _ in sents\r\n ],\r\n \"sentence\": [sent for sents in sentences for sent in sents],\r\n \"sentence_id\": [i for sents in sentences for i in range(len(sents))],\r\n }\r\n\r\n\r\nsplit_ds = ds.map(split, batched=True)\r\n\r\n\r\ndef process(examples):\r\n outputs = some_neural_network_that_works_on_sentences(examples[\"sentence\"])\r\n return {\"outputs\": outputs}\r\n\r\n\r\nsplit_ds = split_ds.map(process, batched=True)\r\n```\r\n\r\nI have a dataset consisting of texts that I would like to process sentence by sentence in a batched way. 
Afterwards, I would like to put it back together as it was, merging the outputs together.\r\n\r\n**Describe the solution you'd like**\r\nIdeally, it would look something like this:\r\n\r\n```python\r\ndef join(examples):\r\n order = np.argsort(examples[\"sentence_id\"])\r\n text = \".\".join(examples[\"text\"][i] for i in order)\r\n outputs = [examples[\"outputs\"][i] for i in order]\r\n return {\"text\": text, \"outputs\": outputs}\r\n\r\n\r\nds = split_ds.group_by(\"example_id\", join)\r\n```\r\n\r\n**Describe alternatives you've considered**\r\nRight now, we can do this:\r\n```python\r\ndef merge(example):\r\n meeting_id = example[\"example_id\"]\r\n parts = split_ds.filter(lambda x: x[\"example_id\"] == meeting_id).sort(\"segment_no\")\r\n return {\"outputs\": list(parts[\"outputs\"])}\r\n\r\nds = ds.map(merge)\r\n```\r\n\r\nOf course, we could process the dataset like this:\r\n\r\n```python\r\ndef process(example):\r\n outputs = some_neural_network_that_works_on_sentences(example[\"text\"].split(\".\"))\r\n return {\"outputs\": outputs}\r\n\r\nds = ds.map(process, batched=True)\r\n```\r\n\r\nHowever, that does not allow using an arbitrary batch size and may lead to very inefficient use of resources if the batch size is much larger than the number of sentences in one example.\r\n\r\nI would very much appreciate some kind of group by operator to merge examples based on the value of one column.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3643","id":1116417428,"node_id":"PR_kwDODunzps4xr8mX","number":3643,"title":"Fix sem_eval_2018_task_1 download 
location","user":{"login":"maxpel","id":31095360,"node_id":"MDQ6VXNlcjMxMDk1MzYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31095360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxpel","html_url":"https:\/\/github.com\/maxpel","followers_url":"https:\/\/api.github.com\/users\/maxpel\/followers","following_url":"https:\/\/api.github.com\/users\/maxpel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxpel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxpel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxpel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxpel\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxpel\/repos","events_url":"https:\/\/api.github.com\/users\/maxpel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxpel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-27T15:45:00Z","updated_at":"2022-02-04T15:15:26Z","closed_at":"2022-02-04T15:15:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3643","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3643","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3643.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3643.patch","merged_at":"2022-02-04T15:15:26Z"},"body":"As discussed with @lhoestq in https:\/\/github.com\/huggingface\/datasets\/issues\/3549#issuecomment-1020176931_ this is the new pull request to fix the download location.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3642","id":1116306986,"node_id":"PR_kwDODunzps4xrj2S","number":3642,"title":"Fix dataset slicing with negative bounds when indices mapping is not 
`None`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-27T14:45:53Z","updated_at":"2022-01-27T18:16:23Z","closed_at":"2022-01-27T18:16:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3642","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3642","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3642.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3642.patch","merged_at":"2022-01-27T18:16:22Z"},"body":"Fix #3611 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3641","id":1116284268,"node_id":"PR_kwDODunzps4xre7C","number":3641,"title":"Fix numpy rngs when seed is 
None","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-27T14:29:09Z","updated_at":"2022-01-27T18:16:08Z","closed_at":"2022-01-27T18:16:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3641","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3641","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3641.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3641.patch","merged_at":"2022-01-27T18:16:07Z"},"body":"Fixes the NumPy RNG when `seed` is `None`.\r\n\r\nThe problem becomes obvious after reading the NumPy notes on RNG (returned by `np.random.get_state()`):\r\n> The MT19937 state vector consists of a 624-element array of 32-bit unsigned integers plus a single integer value between 0 and 624 that indexes the current position within the main array.\r\n\r\n`The MT19937 state vector`: the seed which we currently index, but this value stays the same for multiple rounds.\r\n`plus a single integer value`: the `pos` value in this PR (is 624 if `seed` is set to a fixed value with `np.random.seed`, so we take the first value in the `seed` array returned by `np.random.get_state()`: https:\/\/stackoverflow.com\/questions\/32172054\/how-can-i-retrieve-the-current-seed-of-numpys-random-number-generator)\r\n\r\nNumPy notes: https:\/\/numpy.org\/doc\/stable\/reference\/random\/bit_generators\/mt19937.html\r\n\r\nFix #3634 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3640","id":1116133769,"node_id":"I_kwDODunzps5ChtmJ","number":3640,"title":"Issues with custom dataset in 
Wav2Vec2","user":{"login":"peregilk","id":9079808,"node_id":"MDQ6VXNlcjkwNzk4MDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9079808?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/peregilk","html_url":"https:\/\/github.com\/peregilk","followers_url":"https:\/\/api.github.com\/users\/peregilk\/followers","following_url":"https:\/\/api.github.com\/users\/peregilk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/peregilk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/peregilk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/peregilk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/peregilk\/orgs","repos_url":"https:\/\/api.github.com\/users\/peregilk\/repos","events_url":"https:\/\/api.github.com\/users\/peregilk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/peregilk\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-27T12:09:05Z","updated_at":"2022-01-27T12:29:48Z","closed_at":"2022-01-27T12:29:48Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"We are training Vav2Vec using the run_speech_recognition_ctc_bnb.py-script.\r\n\r\nThis is working fine with Common Voice, however using our custom dataset and data loader at [NbAiLab\/NPSC]( https:\/\/huggingface.co\/datasets\/NbAiLab\/NPSC) it crashes after roughly 1 epoch with the following stack trace:\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/9079808\/151355893-6d5887cc-ca19-4b12-948a-124eb6dac372.png)\r\n\r\n\r\nWe are able to work around the issue, for instance by adding this check in line#222 in transformers\/models\/wav2vec2\/modeling_wav2vec2.py:\r\n```python\r\nif input_length - (mask_length - 1) < num_masked_span:\r\n num_masked_span = input_length - (mask_length - 1)\r\n```\r\nInterestingly, these are the variable values before the adjustment:\r\n```\r\ninput_length=10\r\nmask_length=10\r\nnum_masked_span=2\r\n````\r\nAfter adjusting num_masked_spin to 1, the training script runs. The issue is also fixed by setting \u201creplace=True\u201d in the same function.\r\n\r\nDo you have any idea what is causing this, and how to fix this error permanently? 
If you do not think this is a Datasets issue, feel free to move the issue.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3639","id":1116021420,"node_id":"I_kwDODunzps5ChSKs","number":3639,"title":"same value of precision, recall, f1 score at each epoch for classification task. ","user":{"login":"Dhanachandra","id":10828657,"node_id":"MDQ6VXNlcjEwODI4NjU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10828657?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dhanachandra","html_url":"https:\/\/github.com\/Dhanachandra","followers_url":"https:\/\/api.github.com\/users\/Dhanachandra\/followers","following_url":"https:\/\/api.github.com\/users\/Dhanachandra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dhanachandra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dhanachandra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dhanachandra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dhanachandra\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dhanachandra\/repos","events_url":"https:\/\/api.github.com\/users\/Dhanachandra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dhanachandra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-27T10:14:16Z","updated_at":"2022-02-09T16:11:49Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**1st Epoch:** \r\n1\/27\/2022 09:30:48 - INFO - datasets.metric - Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/f1\/default\/default_experiment-1-0.arrow.59it\/s]\r\n01\/27\/2022 09:30:48 - INFO - datasets.metric - Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/precision\/default\/default_experiment-1-0.arrow\r\n01\/27\/2022 09:30:49 - INFO - datasets.metric - Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/recall\/default\/default_experiment-1-0.arrow\r\nPRECISION: {'precision': 0.7612903225806451}\r\nRECALL: {'recall': 0.7612903225806451}\r\nF1: {'f1': 0.7612903225806451}\r\n{'eval_loss': 1.4658324718475342, 'eval_accuracy': 0.7612903118133545, 'eval_runtime': 30.0054, 'eval_samples_per_second': 46.492, 'eval_steps_per_second': 46.492, 'epoch': 3.0} \r\n**4th Epoch:**\r\n1\/27\/2022 09:56:55 - INFO - datasets.metric - Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/f1\/default\/default_experiment-1-0.arrow.92it\/s]\r\n01\/27\/2022 09:56:56 - INFO - datasets.metric - Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/precision\/default\/default_experiment-1-0.arrow\r\n01\/27\/2022 09:56:56 - INFO - datasets.metric - 
Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/recall\/default\/default_experiment-1-0.arrow\r\nPRECISION: {'precision': 0.7698924731182796}\r\nRECALL: {'recall': 0.7698924731182796}\r\nF1: {'f1': 0.7698924731182796}\r\n\r\n\r\n## Environment info\r\n!git clone https:\/\/github.com\/huggingface\/transformers\r\n%cd transformers\r\n!pip install .\r\n!pip install -r \/content\/transformers\/examples\/pytorch\/token-classification\/requirements.txt\r\n!pip install datasets","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3638","id":1115725703,"node_id":"I_kwDODunzps5CgJ-H","number":3638,"title":"AutoTokenizer hash value got change after datasets.map","user":{"login":"tshu-w","id":13161779,"node_id":"MDQ6VXNlcjEzMTYxNzc5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13161779?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tshu-w","html_url":"https:\/\/github.com\/tshu-w","followers_url":"https:\/\/api.github.com\/users\/tshu-w\/followers","following_url":"https:\/\/api.github.com\/users\/tshu-w\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tshu-w\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tshu-w\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tshu-w\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tshu-w\/orgs","repos_url":"https:\/\/api.github.com\/users\/tshu-w\/repos","events_url":"https:\/\/api.github.com\/users\/tshu-w\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tshu-w\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":9,"created_at":"2022-01-27T03:19:03Z","updated_at":"2022-01-28T03:20:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAutoTokenizer hash value got change after datasets.map\r\n\r\n## Steps to reproduce the bug\r\n1. trash huggingface datasets cache\r\n2. 
run the following code:\r\n```python\r\nfrom transformers import AutoTokenizer, BertTokenizer\r\nfrom datasets import load_dataset\r\nfrom datasets.fingerprint import Hasher\r\ntokenizer = AutoTokenizer.from_pretrained('bert-base-uncased')\r\n\r\ndef tokenize_function(example):\r\n return tokenizer(example[\"sentence1\"], example[\"sentence2\"], truncation=True)\r\n\r\nraw_datasets = load_dataset(\"glue\", \"mrpc\")\r\n\r\nprint(Hasher.hash(tokenize_function))\r\nprint(Hasher.hash(tokenizer))\r\n\r\ntokenized_datasets = raw_datasets.map(tokenize_function, batched=True)\r\n\r\nprint(Hasher.hash(tokenize_function))\r\nprint(Hasher.hash(tokenizer))\r\n```\r\ngot\r\n```\r\nReusing dataset glue (\/home1\/wts\/.cache\/huggingface\/datasets\/glue\/mrpc\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 3\/3 [00:00<00:00, 1112.35it\/s]\r\nf4976bb4694ebc51\r\n3fca35a1fd4a1251\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 4\/4 [00:00<00:00, 
6.96ba\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 15.25ba\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2\/2 [00:00<00:00, 5.81ba\/s]\r\nd32837619b7d7d01\r\n5fd925c82edd62b6\r\n```\r\n3. 
run raw_datasets.map(tokenize_function, batched=True) again and see some dataset are not using cache.\r\n\r\n## Expected results\r\n`AutoTokenizer` work like specific Tokenizer (The hash value don't change after map):\r\n```python\r\nfrom transformers import AutoTokenizer, BertTokenizer\r\nfrom datasets import load_dataset\r\nfrom datasets.fingerprint import Hasher\r\ntokenizer = BertTokenizer.from_pretrained('bert-base-uncased')\r\n\r\ndef tokenize_function(example):\r\n return tokenizer(example[\"sentence1\"], example[\"sentence2\"], truncation=True)\r\n\r\nraw_datasets = load_dataset(\"glue\", \"mrpc\")\r\n\r\nprint(Hasher.hash(tokenize_function))\r\nprint(Hasher.hash(tokenizer))\r\n\r\ntokenized_datasets = raw_datasets.map(tokenize_function, batched=True)\r\n\r\nprint(Hasher.hash(tokenize_function))\r\nprint(Hasher.hash(tokenizer))\r\n```\r\n\r\n```\r\nReusing dataset glue (\/home1\/wts\/.cache\/huggingface\/datasets\/glue\/mrpc\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 3\/3 [00:00<00:00, 1091.22it\/s]\r\n46d4b31f54153fc7\r\n5b8771afd8d43888\r\nLoading cached processed dataset at \/home1\/wts\/.cache\/huggingface\/datasets\/glue\/mrpc\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad\/cache-6b07ff82ae9d5c51.arrow\r\nLoading cached processed dataset at \/home1\/wts\/.cache\/huggingface\/datasets\/glue\/mrpc\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad\/cache-af738a6d84f3864b.arrow\r\nLoading cached processed dataset at \/home1\/wts\/.cache\/huggingface\/datasets\/glue\/mrpc\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad\/cache-531d2a603ba713c1.arrow\r\n46d4b31f54153fc7\r\n5b8771afd8d43888\r\n```\r\n\r\n\r\n## Environment info\r\n- `datasets` version: 1.18.0\r\n- Platform: Linux-5.4.0-91-generic-x86_64-with-glibc2.27\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638\/timeline","performed_via_github_app":null} 
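A note on the tokenizer hash change reported in issue 3638 above: a plausible explanation (an assumption here, not stated in the record) is that fast tokenizers mutate internal truncation/padding state the first time they are called, which changes their pickled bytes and therefore their `Hasher` fingerprint. The hedged sketch below shows one way to test that: "warm up" the tokenizer once with the same arguments before hashing, then check whether the hash stays stable across further calls.

```python
# Hedged sketch, not the resolution of the issue: check whether calling the fast
# tokenizer once up front (with the same truncation arguments used in map) makes
# its datasets fingerprint stable afterwards.
from transformers import AutoTokenizer
from datasets.fingerprint import Hasher

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
tokenizer("warm up", "warm up", truncation=True)  # fix any lazily-set internal state

before = Hasher.hash(tokenizer)
tokenizer("another sentence", "and another", truncation=True)
after = Hasher.hash(tokenizer)
print(before == after)  # True would mean the warm-up stabilised the fingerprint
```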
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3637","id":1115526438,"node_id":"I_kwDODunzps5CfZUm","number":3637,"title":"[TypeError: Couldn't cast array of type] Cannot load dataset in v1.18","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2022-01-26T21:38:02Z","updated_at":"2022-02-09T16:15:53Z","closed_at":"2022-02-09T16:15:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to load the [`GEM\/RiSAWOZ` dataset](https:\/\/huggingface.co\/datasets\/GEM\/RiSAWOZ) in `datasets` v1.18.1 and am running into a type error when casting the features. The strange thing is that I can load the dataset with v1.17.0. Note that the error is also present if I install from `master` too.\r\n\r\nAs far as I can tell, the dataset loading script is correct and the problematic features [here](https:\/\/huggingface.co\/datasets\/GEM\/RiSAWOZ\/blob\/main\/RiSAWOZ.py#L237) also look fine to me.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndset = load_dataset(\"GEM\/RiSAWOZ\")\r\n```\r\n\r\n## Expected results\r\nI can load the dataset without error.\r\n\r\n## Actual results\r\n\r\n
Traceback<\/summary>\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/builder.py in _prepare_split(self, split_generator)\r\n 1083 example = self.info.features.encode_example(record)\r\n-> 1084 writer.write(example, key)\r\n 1085 finally:\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in write(self, example, key, writer_batch_size)\r\n 445 \r\n--> 446 self.write_examples_on_file()\r\n 447 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in write_examples_on_file(self)\r\n 403 batch_examples[col] = [row[0][col] for row in self.current_examples]\r\n--> 404 self.write_batch(batch_examples=batch_examples)\r\n 405 self.current_examples = []\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in write_batch(self, batch_examples, writer_batch_size)\r\n 496 typed_sequence = OptimizedTypedSequence(batch_examples[col], type=col_type, try_type=col_try_type, col=col)\r\n--> 497 arrays.append(pa.array(typed_sequence))\r\n 498 inferred_features[col] = typed_sequence.get_inferred_type()\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/pyarrow\/array.pxi in pyarrow.lib.array()\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/pyarrow\/array.pxi in pyarrow.lib._handle_arrow_array_protocol()\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in __arrow_array__(self, type)\r\n 204 # We only do it if trying_type is False - since this is what the user asks for.\r\n--> 205 out = cast_array_to_feature(out, type, allow_number_to_str=not self.trying_type)\r\n 206 return out\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1064 if isinstance(feature, list):\r\n-> 1065 return pa.ListArray.from_arrays(array.offsets, _c(array.values, feature[0]))\r\n 1066 elif isinstance(feature, Sequence):\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in (.0)\r\n 1059 
if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in (.0)\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1086 return array_cast(array, feature(), allow_number_to_str=allow_number_to_str)\r\n-> 1087 raise TypeError(f\"Couldn't cast array of type\\n{array.type}\\nto\\n{feature}\")\r\n 1088 \r\n\r\nTypeError: Couldn't cast array of type\r\nstruct<\u533b\u9662-3.0T MRI: string, \u533b\u9662-CT: string, \u533b\u9662-DSA: string, \u533b\u9662-\u516c\u4ea4\u7ebf\u8def: string, \u533b\u9662-\u533a\u57df: string, \u533b\u9662-\u540d\u79f0: string, \u533b\u9662-\u5730\u5740: string, \u533b\u9662-\u5730\u94c1\u53ef\u8fbe: string, \u533b\u9662-\u5730\u94c1\u7ebf\u8def: string, \u533b\u9662-\u6027\u8d28: string, \u533b\u9662-\u6302\u53f7\u65f6\u95f4: string, \u533b\u9662-\u7535\u8bdd: string, \u533b\u9662-\u7b49\u7ea7: string, \u533b\u9662-\u7c7b\u522b: string, \u533b\u9662-\u91cd\u70b9\u79d1\u5ba4: string, \u533b\u9662-\u95e8\u8bca\u65f6\u95f4: string, \u5929\u6c14-\u57ce\u5e02: string, \u5929\u6c14-\u5929\u6c14: string, \u5929\u6c14-\u65e5\u671f: string, \u5929\u6c14-\u6e29\u5ea6: string, \u5929\u6c14-\u7d2b\u5916\u7ebf\u5f3a\u5ea6: string, \u5929\u6c14-\u98ce\u529b\u98ce\u5411: string, \u65c5\u6e38\u666f\u70b9-\u533a\u57df: string, \u65c5\u6e38\u666f\u70b9-\u540d\u79f0: string, \u65c5\u6e38\u666f\u70b9-\u5730\u5740: string, \u65c5\u6e38\u666f\u70b9-\u5f00\u653e\u65f6\u95f4: string, \u65c5\u6e38\u666f\u70b9-\u662f\u5426\u5730\u94c1\u76f4\u8fbe: string, \u65c5\u6e38\u666f\u70b9-\u666f\u70b9\u7c7b\u578b: string, \u65c5\u6e38\u666f\u70b9-\u6700\u9002\u5408\u4eba\u7fa4: string, \u65c5\u6e38\u666f\u70b9-\u6d88\u8d39: string, 
\u65c5\u6e38\u666f\u70b9-\u7279\u70b9: string, \u65c5\u6e38\u666f\u70b9-\u7535\u8bdd\u53f7\u7801: string, \u65c5\u6e38\u666f\u70b9-\u8bc4\u5206: string, \u65c5\u6e38\u666f\u70b9-\u95e8\u7968\u4ef7\u683c: string, \u6c7d\u8f66-\u4ef7\u683c(\u4e07\u5143): string, \u6c7d\u8f66-\u5012\u8f66\u5f71\u50cf: string, \u6c7d\u8f66-\u52a8\u529b\u6c34\u5e73: string, \u6c7d\u8f66-\u5382\u5546: string, \u6c7d\u8f66-\u53d1\u52a8\u673a\u6392\u91cf(L): string, \u6c7d\u8f66-\u53d1\u52a8\u673a\u9a6c\u529b(Ps): string, \u6c7d\u8f66-\u540d\u79f0: string, \u6c7d\u8f66-\u5b9a\u901f\u5de1\u822a: string, \u6c7d\u8f66-\u5de1\u822a\u7cfb\u7edf: string, \u6c7d\u8f66-\u5ea7\u4f4d\u6570: string, \u6c7d\u8f66-\u5ea7\u6905\u52a0\u70ed: string, \u6c7d\u8f66-\u5ea7\u6905\u901a\u98ce: string, \u6c7d\u8f66-\u6240\u5c5e\u4ef7\u683c\u533a\u95f4: string, \u6c7d\u8f66-\u6cb9\u8017\u6c34\u5e73: string, \u6c7d\u8f66-\u73af\u4fdd\u6807\u51c6: string, \u6c7d\u8f66-\u7ea7\u522b: string, \u6c7d\u8f66-\u7efc\u5408\u6cb9\u8017(L\/100km): string, \u6c7d\u8f66-\u80fd\u6e90\u7c7b\u578b: string, \u6c7d\u8f66-\u8f66\u578b: string, \u6c7d\u8f66-\u8f66\u7cfb: string, \u6c7d\u8f66-\u8f66\u8eab\u5c3a\u5bf8(mm): string, \u6c7d\u8f66-\u9a71\u52a8\u65b9\u5f0f: string, \u6c7d\u8f66-\u9a7e\u9a76\u8f85\u52a9\u5f71\u50cf: string, \u706b\u8f66-\u51fa\u53d1\u5730: string, \u706b\u8f66-\u51fa\u53d1\u65f6\u95f4: string, \u706b\u8f66-\u5230\u8fbe\u65f6\u95f4: string, \u706b\u8f66-\u5750\u5e2d: string, \u706b\u8f66-\u65e5\u671f: string, \u706b\u8f66-\u65f6\u957f: string, \u706b\u8f66-\u76ee\u7684\u5730: string, \u706b\u8f66-\u7968\u4ef7: string, \u706b\u8f66-\u8231\u4f4d\u6863\u6b21: string, \u706b\u8f66-\u8f66\u578b: string, \u706b\u8f66-\u8f66\u6b21\u4fe1\u606f: string, \u7535\u5f71-\u4e3b\u6f14: string, \u7535\u5f71-\u4e3b\u6f14\u540d\u5355: string, \u7535\u5f71-\u5177\u4f53\u4e0a\u6620\u65f6\u95f4: string, \u7535\u5f71-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a: string, \u7535\u5f71-\u5bfc\u6f14: string, \u7535\u5f71-\u5e74\u4ee3: string, \u7535\u5f71-\u7247\u540d: string, \u7535\u5f71-\u7247\u957f: string, \u7535\u5f71-\u7c7b\u578b: string, \u7535\u5f71-\u8c46\u74e3\u8bc4\u5206: string, \u7535\u8111-CPU: string, \u7535\u8111-CPU\u578b\u53f7: string, \u7535\u8111-\u4ea7\u54c1\u7c7b\u522b: string, \u7535\u8111-\u4ef7\u683c: string, \u7535\u8111-\u4ef7\u683c\u533a\u95f4: string, \u7535\u8111-\u5185\u5b58\u5bb9\u91cf: string, \u7535\u8111-\u5206\u7c7b: string, \u7535\u8111-\u54c1\u724c: string, \u7535\u8111-\u5546\u54c1\u540d\u79f0: string, \u7535\u8111-\u5c4f\u5e55\u5c3a\u5bf8: string, \u7535\u8111-\u5f85\u673a\u65f6\u957f: string, \u7535\u8111-\u663e\u5361\u578b\u53f7: string, \u7535\u8111-\u663e\u5361\u7c7b\u522b: string, \u7535\u8111-\u6e38\u620f\u6027\u80fd: string, \u7535\u8111-\u7279\u6027: string, \u7535\u8111-\u786c\u76d8\u5bb9\u91cf: string, \u7535\u8111-\u7cfb\u5217: string, \u7535\u8111-\u7cfb\u7edf: string, \u7535\u8111-\u8272\u7cfb: string, \u7535\u8111-\u88f8\u673a\u91cd\u91cf: string, \u7535\u89c6\u5267-\u4e3b\u6f14: string, \u7535\u89c6\u5267-\u4e3b\u6f14\u540d\u5355: string, \u7535\u89c6\u5267-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a: string, \u7535\u89c6\u5267-\u5355\u96c6\u7247\u957f: string, \u7535\u89c6\u5267-\u5bfc\u6f14: string, \u7535\u89c6\u5267-\u5e74\u4ee3: string, \u7535\u89c6\u5267-\u7247\u540d: string, \u7535\u89c6\u5267-\u7c7b\u578b: string, \u7535\u89c6\u5267-\u8c46\u74e3\u8bc4\u5206: string, \u7535\u89c6\u5267-\u96c6\u6570: string, \u7535\u89c6\u5267-\u9996\u64ad\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65b9\u5f0f: 
string, \u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4e0b\u8bfe\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4ef7\u683c: string, \u8f85\u5bfc\u73ed-\u533a\u57df: string, \u8f85\u5bfc\u73ed-\u5e74\u7ea7: string, \u8f85\u5bfc\u73ed-\u5f00\u59cb\u65e5\u671f: string, \u8f85\u5bfc\u73ed-\u6559\u5ba4\u5730\u70b9: string, \u8f85\u5bfc\u73ed-\u6559\u5e08: string, \u8f85\u5bfc\u73ed-\u6559\u5e08\u7f51\u5740: string, \u8f85\u5bfc\u73ed-\u65f6\u6bb5: string, \u8f85\u5bfc\u73ed-\u6821\u533a: string, \u8f85\u5bfc\u73ed-\u6bcf\u5468: string, \u8f85\u5bfc\u73ed-\u73ed\u53f7: string, \u8f85\u5bfc\u73ed-\u79d1\u76ee: string, \u8f85\u5bfc\u73ed-\u7ed3\u675f\u65e5\u671f: string, \u8f85\u5bfc\u73ed-\u8bfe\u65f6: string, \u8f85\u5bfc\u73ed-\u8bfe\u6b21: string, \u8f85\u5bfc\u73ed-\u8bfe\u7a0b\u7f51\u5740: string, \u8f85\u5bfc\u73ed-\u96be\u5ea6: string, \u901a\u7528-\u4ea7\u54c1\u7c7b\u522b: string, \u901a\u7528-\u4ef7\u683c\u533a\u95f4: string, \u901a\u7528-\u54c1\u724c: string, \u901a\u7528-\u7cfb\u5217: string, \u9152\u5e97-\u4ef7\u4f4d: string, \u9152\u5e97-\u505c\u8f66\u573a: string, \u9152\u5e97-\u533a\u57df: string, \u9152\u5e97-\u540d\u79f0: string, \u9152\u5e97-\u5730\u5740: string, \u9152\u5e97-\u623f\u578b: string, \u9152\u5e97-\u623f\u8d39: string, \u9152\u5e97-\u661f\u7ea7: string, \u9152\u5e97-\u7535\u8bdd\u53f7\u7801: string, \u9152\u5e97-\u8bc4\u5206: string, \u9152\u5e97-\u9152\u5e97\u7c7b\u578b: string, \u98de\u673a-\u51c6\u70b9\u7387: string, \u98de\u673a-\u51fa\u53d1\u5730: string, \u98de\u673a-\u5230\u8fbe\u65f6\u95f4: string, \u98de\u673a-\u65e5\u671f: string, \u98de\u673a-\u76ee\u7684\u5730: string, \u98de\u673a-\u7968\u4ef7: string, \u98de\u673a-\u822a\u73ed\u4fe1\u606f: string, \u98de\u673a-\u8231\u4f4d\u6863\u6b21: string, \u98de\u673a-\u8d77\u98de\u65f6\u95f4: string, \u9910\u5385-\u4eba\u5747\u6d88\u8d39: string, \u9910\u5385-\u4ef7\u4f4d: string, \u9910\u5385-\u533a\u57df: string, \u9910\u5385-\u540d\u79f0: string, \u9910\u5385-\u5730\u5740: string, \u9910\u5385-\u63a8\u8350\u83dc: string, \u9910\u5385-\u662f\u5426\u5730\u94c1\u76f4\u8fbe: string, \u9910\u5385-\u7535\u8bdd\u53f7\u7801: string, \u9910\u5385-\u83dc\u7cfb: string, \u9910\u5385-\u8425\u4e1a\u65f6\u95f4: string, \u9910\u5385-\u8bc4\u5206: string>\r\nto\r\n{'\u65c5\u6e38\u666f\u70b9-\u540d\u79f0': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u533a\u57df': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u666f\u70b9\u7c7b\u578b': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u6700\u9002\u5408\u4eba\u7fa4': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u6d88\u8d39': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u662f\u5426\u5730\u94c1\u76f4\u8fbe': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u95e8\u7968\u4ef7\u683c': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u5730\u5740': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u8bc4\u5206': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u5f00\u653e\u65f6\u95f4': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u7279\u70b9': Value(dtype='string', id=None), '\u9910\u5385-\u540d\u79f0': Value(dtype='string', id=None), '\u9910\u5385-\u533a\u57df': Value(dtype='string', id=None), '\u9910\u5385-\u83dc\u7cfb': Value(dtype='string', id=None), '\u9910\u5385-\u4ef7\u4f4d': Value(dtype='string', id=None), '\u9910\u5385-\u662f\u5426\u5730\u94c1\u76f4\u8fbe': 
Value(dtype='string', id=None), '\u9910\u5385-\u4eba\u5747\u6d88\u8d39': Value(dtype='string', id=None), '\u9910\u5385-\u5730\u5740': Value(dtype='string', id=None), '\u9910\u5385-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u9910\u5385-\u8bc4\u5206': Value(dtype='string', id=None), '\u9910\u5385-\u8425\u4e1a\u65f6\u95f4': Value(dtype='string', id=None), '\u9910\u5385-\u63a8\u8350\u83dc': Value(dtype='string', id=None), '\u9152\u5e97-\u540d\u79f0': Value(dtype='string', id=None), '\u9152\u5e97-\u533a\u57df': Value(dtype='string', id=None), '\u9152\u5e97-\u661f\u7ea7': Value(dtype='string', id=None), '\u9152\u5e97-\u4ef7\u4f4d': Value(dtype='string', id=None), '\u9152\u5e97-\u9152\u5e97\u7c7b\u578b': Value(dtype='string', id=None), '\u9152\u5e97-\u623f\u578b': Value(dtype='string', id=None), '\u9152\u5e97-\u505c\u8f66\u573a': Value(dtype='string', id=None), '\u9152\u5e97-\u623f\u8d39': Value(dtype='string', id=None), '\u9152\u5e97-\u5730\u5740': Value(dtype='string', id=None), '\u9152\u5e97-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u9152\u5e97-\u8bc4\u5206': Value(dtype='string', id=None), '\u7535\u8111-\u54c1\u724c': Value(dtype='string', id=None), '\u7535\u8111-\u4ea7\u54c1\u7c7b\u522b': Value(dtype='string', id=None), '\u7535\u8111-\u5206\u7c7b': Value(dtype='string', id=None), '\u7535\u8111-\u5185\u5b58\u5bb9\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u5c4f\u5e55\u5c3a\u5bf8': Value(dtype='string', id=None), '\u7535\u8111-CPU': Value(dtype='string', id=None), '\u7535\u8111-\u4ef7\u683c\u533a\u95f4': Value(dtype='string', id=None), '\u7535\u8111-\u7cfb\u5217': Value(dtype='string', id=None), '\u7535\u8111-\u5546\u54c1\u540d\u79f0': Value(dtype='string', id=None), '\u7535\u8111-\u7cfb\u7edf': Value(dtype='string', id=None), '\u7535\u8111-\u6e38\u620f\u6027\u80fd': Value(dtype='string', id=None), '\u7535\u8111-CPU\u578b\u53f7': Value(dtype='string', id=None), '\u7535\u8111-\u88f8\u673a\u91cd\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u663e\u5361\u7c7b\u522b': Value(dtype='string', id=None), '\u7535\u8111-\u663e\u5361\u578b\u53f7': Value(dtype='string', id=None), '\u7535\u8111-\u7279\u6027': Value(dtype='string', id=None), '\u7535\u8111-\u8272\u7cfb': Value(dtype='string', id=None), '\u7535\u8111-\u5f85\u673a\u65f6\u957f': Value(dtype='string', id=None), '\u7535\u8111-\u786c\u76d8\u5bb9\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u4ef7\u683c': Value(dtype='string', id=None), '\u706b\u8f66-\u51fa\u53d1\u5730': Value(dtype='string', id=None), '\u706b\u8f66-\u76ee\u7684\u5730': Value(dtype='string', id=None), '\u706b\u8f66-\u65e5\u671f': Value(dtype='string', id=None), '\u706b\u8f66-\u8f66\u578b': Value(dtype='string', id=None), '\u706b\u8f66-\u5750\u5e2d': Value(dtype='string', id=None), '\u706b\u8f66-\u8f66\u6b21\u4fe1\u606f': Value(dtype='string', id=None), '\u706b\u8f66-\u65f6\u957f': Value(dtype='string', id=None), '\u706b\u8f66-\u51fa\u53d1\u65f6\u95f4': Value(dtype='string', id=None), '\u706b\u8f66-\u5230\u8fbe\u65f6\u95f4': Value(dtype='string', id=None), '\u706b\u8f66-\u7968\u4ef7': Value(dtype='string', id=None), '\u98de\u673a-\u51fa\u53d1\u5730': Value(dtype='string', id=None), '\u98de\u673a-\u76ee\u7684\u5730': Value(dtype='string', id=None), '\u98de\u673a-\u65e5\u671f': Value(dtype='string', id=None), '\u98de\u673a-\u8231\u4f4d\u6863\u6b21': Value(dtype='string', id=None), '\u98de\u673a-\u822a\u73ed\u4fe1\u606f': Value(dtype='string', id=None), '\u98de\u673a-\u8d77\u98de\u65f6\u95f4': Value(dtype='string', 
id=None), '\u98de\u673a-\u5230\u8fbe\u65f6\u95f4': Value(dtype='string', id=None), '\u98de\u673a-\u7968\u4ef7': Value(dtype='string', id=None), '\u98de\u673a-\u51c6\u70b9\u7387': Value(dtype='string', id=None), '\u5929\u6c14-\u57ce\u5e02': Value(dtype='string', id=None), '\u5929\u6c14-\u65e5\u671f': Value(dtype='string', id=None), '\u5929\u6c14-\u5929\u6c14': Value(dtype='string', id=None), '\u5929\u6c14-\u6e29\u5ea6': Value(dtype='string', id=None), '\u5929\u6c14-\u98ce\u529b\u98ce\u5411': Value(dtype='string', id=None), '\u5929\u6c14-\u7d2b\u5916\u7ebf\u5f3a\u5ea6': Value(dtype='string', id=None), '\u7535\u5f71-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a': Value(dtype='string', id=None), '\u7535\u5f71-\u7c7b\u578b': Value(dtype='string', id=None), '\u7535\u5f71-\u5e74\u4ee3': Value(dtype='string', id=None), '\u7535\u5f71-\u4e3b\u6f14': Value(dtype='string', id=None), '\u7535\u5f71-\u5bfc\u6f14': Value(dtype='string', id=None), '\u7535\u5f71-\u7247\u540d': Value(dtype='string', id=None), '\u7535\u5f71-\u4e3b\u6f14\u540d\u5355': Value(dtype='string', id=None), '\u7535\u5f71-\u5177\u4f53\u4e0a\u6620\u65f6\u95f4': Value(dtype='string', id=None), '\u7535\u5f71-\u7247\u957f': Value(dtype='string', id=None), '\u7535\u5f71-\u8c46\u74e3\u8bc4\u5206': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u7c7b\u578b': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5e74\u4ee3': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u4e3b\u6f14': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5bfc\u6f14': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u7247\u540d': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u4e3b\u6f14\u540d\u5355': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u9996\u64ad\u65f6\u95f4': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u96c6\u6570': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5355\u96c6\u7247\u957f': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u8c46\u74e3\u8bc4\u5206': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u73ed\u53f7': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u96be\u5ea6': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u79d1\u76ee': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u5e74\u7ea7': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u533a\u57df': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6821\u533a': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65b9\u5f0f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u5f00\u59cb\u65e5\u671f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u7ed3\u675f\u65e5\u671f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6bcf\u5468': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65f6\u95f4': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0b\u8bfe\u65f6\u95f4': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u65f6\u6bb5': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u6b21': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u65f6': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5ba4\u5730\u70b9': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5e08': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4ef7\u683c': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u7a0b\u7f51\u5740': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5e08\u7f51\u5740': Value(dtype='string', id=None), 
'\u6c7d\u8f66-\u540d\u79f0': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u578b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u7ea7\u522b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u4f4d\u6570': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u8eab\u5c3a\u5bf8(mm)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5382\u5546': Value(dtype='string', id=None), '\u6c7d\u8f66-\u80fd\u6e90\u7c7b\u578b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u53d1\u52a8\u673a\u6392\u91cf(L)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u53d1\u52a8\u673a\u9a6c\u529b(Ps)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u9a71\u52a8\u65b9\u5f0f': Value(dtype='string', id=None), '\u6c7d\u8f66-\u7efc\u5408\u6cb9\u8017(L\/100km)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u73af\u4fdd\u6807\u51c6': Value(dtype='string', id=None), '\u6c7d\u8f66-\u9a7e\u9a76\u8f85\u52a9\u5f71\u50cf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5de1\u822a\u7cfb\u7edf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u4ef7\u683c(\u4e07\u5143)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u7cfb': Value(dtype='string', id=None), '\u6c7d\u8f66-\u52a8\u529b\u6c34\u5e73': Value(dtype='string', id=None), '\u6c7d\u8f66-\u6cb9\u8017\u6c34\u5e73': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5012\u8f66\u5f71\u50cf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5b9a\u901f\u5de1\u822a': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u6905\u52a0\u70ed': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u6905\u901a\u98ce': Value(dtype='string', id=None), '\u6c7d\u8f66-\u6240\u5c5e\u4ef7\u683c\u533a\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u540d\u79f0': Value(dtype='string', id=None), '\u533b\u9662-\u7b49\u7ea7': Value(dtype='string', id=None), '\u533b\u9662-\u7c7b\u522b': Value(dtype='string', id=None), '\u533b\u9662-\u6027\u8d28': Value(dtype='string', id=None), '\u533b\u9662-\u533a\u57df': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u5740': Value(dtype='string', id=None), '\u533b\u9662-\u7535\u8bdd': Value(dtype='string', id=None), '\u533b\u9662-\u6302\u53f7\u65f6\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u95e8\u8bca\u65f6\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u516c\u4ea4\u7ebf\u8def': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u94c1\u53ef\u8fbe': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u94c1\u7ebf\u8def': Value(dtype='string', id=None), '\u533b\u9662-\u91cd\u70b9\u79d1\u5ba4': Value(dtype='string', id=None), '\u533b\u9662-CT': Value(dtype='string', id=None), '\u533b\u9662-3.0T MRI': Value(dtype='string', id=None), '\u533b\u9662-DSA': Value(dtype='string', id=None)}\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTypeError Traceback (most recent call last)\r\n\/var\/folders\/28\/k4cy5q7s2hs92xq7_h89_vgm0000gn\/T\/ipykernel_44306\/2896005239.py in \r\n----> 1 dset = load_dataset(\"GEM\/RiSAWOZ\")\r\n 2 dset\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, revision, use_auth_token, task, streaming, script_version, **config_kwargs)\r\n 1692 \r\n 1693 # Download and prepare data\r\n-> 1694 builder_instance.download_and_prepare(\r\n 1695 download_config=download_config,\r\n 1696 
download_mode=download_mode,\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n 593 logger.warning(\"HF google storage unreachable. Downloading and preparing it from source\")\r\n 594 if not downloaded_from_gcs:\r\n--> 595 self._download_and_prepare(\r\n 596 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 597 )\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 682 try:\r\n 683 # Prepare split will record examples associated to the split\r\n--> 684 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n 685 except OSError as e:\r\n 686 raise OSError(\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/builder.py in _prepare_split(self, split_generator)\r\n 1084 writer.write(example, key)\r\n 1085 finally:\r\n-> 1086 num_examples, num_bytes = writer.finalize()\r\n 1087 \r\n 1088 split_generator.split_info.num_examples = num_examples\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in finalize(self, close_stream)\r\n 525 # Re-intializing to empty list for next batch\r\n 526 self.hkey_record = []\r\n--> 527 self.write_examples_on_file()\r\n 528 if self.pa_writer is None:\r\n 529 if self.schema:\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in write_examples_on_file(self)\r\n 402 # Since current_examples contains (example, key) tuples\r\n 403 batch_examples[col] = [row[0][col] for row in self.current_examples]\r\n--> 404 self.write_batch(batch_examples=batch_examples)\r\n 405 self.current_examples = []\r\n 406 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in write_batch(self, batch_examples, writer_batch_size)\r\n 495 col_try_type = try_features[col] if try_features is not None and col in try_features else None\r\n 496 typed_sequence = OptimizedTypedSequence(batch_examples[col], type=col_type, try_type=col_try_type, col=col)\r\n--> 497 arrays.append(pa.array(typed_sequence))\r\n 498 inferred_features[col] = typed_sequence.get_inferred_type()\r\n 499 schema = inferred_features.arrow_schema if self.pa_writer is None else self.schema\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/pyarrow\/array.pxi in pyarrow.lib.array()\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/pyarrow\/array.pxi in pyarrow.lib._handle_arrow_array_protocol()\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in __arrow_array__(self, type)\r\n 203 # Also, when trying type \"string\", we don't want to convert integers or floats to \"string\".\r\n 204 # We only do it if trying_type is False - since this is what the user asks for.\r\n--> 205 out = cast_array_to_feature(out, type, allow_number_to_str=not self.trying_type)\r\n 206 return out\r\n 207 except (TypeError, pa.lib.ArrowInvalid) as e: # handle type errors and overflows\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 942 if pa.types.is_list(array.type) and config.PYARROW_VERSION < version.parse(\"4.0.0\"):\r\n 943 array = 
_sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n 946 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 918 return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n 922 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1063 # feature must be either [subfeature] or Sequence(subfeature)\r\n 1064 if isinstance(feature, list):\r\n-> 1065 return pa.ListArray.from_arrays(array.offsets, _c(array.values, feature[0]))\r\n 1066 elif isinstance(feature, Sequence):\r\n 1067 if feature.length > -1:\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 942 if pa.types.is_list(array.type) and config.PYARROW_VERSION < version.parse(\"4.0.0\"):\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n 946 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 918 return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n 922 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1058 }\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n 1062 elif pa.types.is_list(array.type):\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in (.0)\r\n 1058 }\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n 1062 elif pa.types.is_list(array.type):\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 942 if pa.types.is_list(array.type) and config.PYARROW_VERSION < version.parse(\"4.0.0\"):\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n 946 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 918 return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n 922 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1058 }\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n 1062 elif 
pa.types.is_list(array.type):\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in (.0)\r\n 1058 }\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n 1062 elif pa.types.is_list(array.type):\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 942 if pa.types.is_list(array.type) and config.PYARROW_VERSION < version.parse(\"4.0.0\"):\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n 946 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 918 return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n 922 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1085 elif not isinstance(feature, (Sequence, dict, list, tuple)):\r\n 1086 return array_cast(array, feature(), allow_number_to_str=allow_number_to_str)\r\n-> 1087 raise TypeError(f\"Couldn't cast array of type\\n{array.type}\\nto\\n{feature}\")\r\n 1088 \r\n 1089 \r\n\r\nTypeError: Couldn't cast array of type\r\nstruct<\u533b\u9662-3.0T MRI: string, \u533b\u9662-CT: string, \u533b\u9662-DSA: string, \u533b\u9662-\u516c\u4ea4\u7ebf\u8def: string, \u533b\u9662-\u533a\u57df: string, \u533b\u9662-\u540d\u79f0: string, \u533b\u9662-\u5730\u5740: string, \u533b\u9662-\u5730\u94c1\u53ef\u8fbe: string, \u533b\u9662-\u5730\u94c1\u7ebf\u8def: string, \u533b\u9662-\u6027\u8d28: string, \u533b\u9662-\u6302\u53f7\u65f6\u95f4: string, \u533b\u9662-\u7535\u8bdd: string, \u533b\u9662-\u7b49\u7ea7: string, \u533b\u9662-\u7c7b\u522b: string, \u533b\u9662-\u91cd\u70b9\u79d1\u5ba4: string, \u533b\u9662-\u95e8\u8bca\u65f6\u95f4: string, \u5929\u6c14-\u57ce\u5e02: string, \u5929\u6c14-\u5929\u6c14: string, \u5929\u6c14-\u65e5\u671f: string, \u5929\u6c14-\u6e29\u5ea6: string, \u5929\u6c14-\u7d2b\u5916\u7ebf\u5f3a\u5ea6: string, \u5929\u6c14-\u98ce\u529b\u98ce\u5411: string, \u65c5\u6e38\u666f\u70b9-\u533a\u57df: string, \u65c5\u6e38\u666f\u70b9-\u540d\u79f0: string, \u65c5\u6e38\u666f\u70b9-\u5730\u5740: string, \u65c5\u6e38\u666f\u70b9-\u5f00\u653e\u65f6\u95f4: string, \u65c5\u6e38\u666f\u70b9-\u662f\u5426\u5730\u94c1\u76f4\u8fbe: string, \u65c5\u6e38\u666f\u70b9-\u666f\u70b9\u7c7b\u578b: string, \u65c5\u6e38\u666f\u70b9-\u6700\u9002\u5408\u4eba\u7fa4: string, \u65c5\u6e38\u666f\u70b9-\u6d88\u8d39: string, \u65c5\u6e38\u666f\u70b9-\u7279\u70b9: string, \u65c5\u6e38\u666f\u70b9-\u7535\u8bdd\u53f7\u7801: string, \u65c5\u6e38\u666f\u70b9-\u8bc4\u5206: string, \u65c5\u6e38\u666f\u70b9-\u95e8\u7968\u4ef7\u683c: string, \u6c7d\u8f66-\u4ef7\u683c(\u4e07\u5143): string, \u6c7d\u8f66-\u5012\u8f66\u5f71\u50cf: string, \u6c7d\u8f66-\u52a8\u529b\u6c34\u5e73: string, \u6c7d\u8f66-\u5382\u5546: string, \u6c7d\u8f66-\u53d1\u52a8\u673a\u6392\u91cf(L): string, \u6c7d\u8f66-\u53d1\u52a8\u673a\u9a6c\u529b(Ps): string, \u6c7d\u8f66-\u540d\u79f0: string, \u6c7d\u8f66-\u5b9a\u901f\u5de1\u822a: string, \u6c7d\u8f66-\u5de1\u822a\u7cfb\u7edf: string, \u6c7d\u8f66-\u5ea7\u4f4d\u6570: string, 
\u6c7d\u8f66-\u5ea7\u6905\u52a0\u70ed: string, \u6c7d\u8f66-\u5ea7\u6905\u901a\u98ce: string, \u6c7d\u8f66-\u6240\u5c5e\u4ef7\u683c\u533a\u95f4: string, \u6c7d\u8f66-\u6cb9\u8017\u6c34\u5e73: string, \u6c7d\u8f66-\u73af\u4fdd\u6807\u51c6: string, \u6c7d\u8f66-\u7ea7\u522b: string, \u6c7d\u8f66-\u7efc\u5408\u6cb9\u8017(L\/100km): string, \u6c7d\u8f66-\u80fd\u6e90\u7c7b\u578b: string, \u6c7d\u8f66-\u8f66\u578b: string, \u6c7d\u8f66-\u8f66\u7cfb: string, \u6c7d\u8f66-\u8f66\u8eab\u5c3a\u5bf8(mm): string, \u6c7d\u8f66-\u9a71\u52a8\u65b9\u5f0f: string, \u6c7d\u8f66-\u9a7e\u9a76\u8f85\u52a9\u5f71\u50cf: string, \u706b\u8f66-\u51fa\u53d1\u5730: string, \u706b\u8f66-\u51fa\u53d1\u65f6\u95f4: string, \u706b\u8f66-\u5230\u8fbe\u65f6\u95f4: string, \u706b\u8f66-\u5750\u5e2d: string, \u706b\u8f66-\u65e5\u671f: string, \u706b\u8f66-\u65f6\u957f: string, \u706b\u8f66-\u76ee\u7684\u5730: string, \u706b\u8f66-\u7968\u4ef7: string, \u706b\u8f66-\u8231\u4f4d\u6863\u6b21: string, \u706b\u8f66-\u8f66\u578b: string, \u706b\u8f66-\u8f66\u6b21\u4fe1\u606f: string, \u7535\u5f71-\u4e3b\u6f14: string, \u7535\u5f71-\u4e3b\u6f14\u540d\u5355: string, \u7535\u5f71-\u5177\u4f53\u4e0a\u6620\u65f6\u95f4: string, \u7535\u5f71-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a: string, \u7535\u5f71-\u5bfc\u6f14: string, \u7535\u5f71-\u5e74\u4ee3: string, \u7535\u5f71-\u7247\u540d: string, \u7535\u5f71-\u7247\u957f: string, \u7535\u5f71-\u7c7b\u578b: string, \u7535\u5f71-\u8c46\u74e3\u8bc4\u5206: string, \u7535\u8111-CPU: string, \u7535\u8111-CPU\u578b\u53f7: string, \u7535\u8111-\u4ea7\u54c1\u7c7b\u522b: string, \u7535\u8111-\u4ef7\u683c: string, \u7535\u8111-\u4ef7\u683c\u533a\u95f4: string, \u7535\u8111-\u5185\u5b58\u5bb9\u91cf: string, \u7535\u8111-\u5206\u7c7b: string, \u7535\u8111-\u54c1\u724c: string, \u7535\u8111-\u5546\u54c1\u540d\u79f0: string, \u7535\u8111-\u5c4f\u5e55\u5c3a\u5bf8: string, \u7535\u8111-\u5f85\u673a\u65f6\u957f: string, \u7535\u8111-\u663e\u5361\u578b\u53f7: string, \u7535\u8111-\u663e\u5361\u7c7b\u522b: string, \u7535\u8111-\u6e38\u620f\u6027\u80fd: string, \u7535\u8111-\u7279\u6027: string, \u7535\u8111-\u786c\u76d8\u5bb9\u91cf: string, \u7535\u8111-\u7cfb\u5217: string, \u7535\u8111-\u7cfb\u7edf: string, \u7535\u8111-\u8272\u7cfb: string, \u7535\u8111-\u88f8\u673a\u91cd\u91cf: string, \u7535\u89c6\u5267-\u4e3b\u6f14: string, \u7535\u89c6\u5267-\u4e3b\u6f14\u540d\u5355: string, \u7535\u89c6\u5267-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a: string, \u7535\u89c6\u5267-\u5355\u96c6\u7247\u957f: string, \u7535\u89c6\u5267-\u5bfc\u6f14: string, \u7535\u89c6\u5267-\u5e74\u4ee3: string, \u7535\u89c6\u5267-\u7247\u540d: string, \u7535\u89c6\u5267-\u7c7b\u578b: string, \u7535\u89c6\u5267-\u8c46\u74e3\u8bc4\u5206: string, \u7535\u89c6\u5267-\u96c6\u6570: string, \u7535\u89c6\u5267-\u9996\u64ad\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65b9\u5f0f: string, \u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4e0b\u8bfe\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4ef7\u683c: string, \u8f85\u5bfc\u73ed-\u533a\u57df: string, \u8f85\u5bfc\u73ed-\u5e74\u7ea7: string, \u8f85\u5bfc\u73ed-\u5f00\u59cb\u65e5\u671f: string, \u8f85\u5bfc\u73ed-\u6559\u5ba4\u5730\u70b9: string, \u8f85\u5bfc\u73ed-\u6559\u5e08: string, \u8f85\u5bfc\u73ed-\u6559\u5e08\u7f51\u5740: string, \u8f85\u5bfc\u73ed-\u65f6\u6bb5: string, \u8f85\u5bfc\u73ed-\u6821\u533a: string, \u8f85\u5bfc\u73ed-\u6bcf\u5468: string, \u8f85\u5bfc\u73ed-\u73ed\u53f7: string, \u8f85\u5bfc\u73ed-\u79d1\u76ee: string, 
\u8f85\u5bfc\u73ed-\u7ed3\u675f\u65e5\u671f: string, \u8f85\u5bfc\u73ed-\u8bfe\u65f6: string, \u8f85\u5bfc\u73ed-\u8bfe\u6b21: string, \u8f85\u5bfc\u73ed-\u8bfe\u7a0b\u7f51\u5740: string, \u8f85\u5bfc\u73ed-\u96be\u5ea6: string, \u901a\u7528-\u4ea7\u54c1\u7c7b\u522b: string, \u901a\u7528-\u4ef7\u683c\u533a\u95f4: string, \u901a\u7528-\u54c1\u724c: string, \u901a\u7528-\u7cfb\u5217: string, \u9152\u5e97-\u4ef7\u4f4d: string, \u9152\u5e97-\u505c\u8f66\u573a: string, \u9152\u5e97-\u533a\u57df: string, \u9152\u5e97-\u540d\u79f0: string, \u9152\u5e97-\u5730\u5740: string, \u9152\u5e97-\u623f\u578b: string, \u9152\u5e97-\u623f\u8d39: string, \u9152\u5e97-\u661f\u7ea7: string, \u9152\u5e97-\u7535\u8bdd\u53f7\u7801: string, \u9152\u5e97-\u8bc4\u5206: string, \u9152\u5e97-\u9152\u5e97\u7c7b\u578b: string, \u98de\u673a-\u51c6\u70b9\u7387: string, \u98de\u673a-\u51fa\u53d1\u5730: string, \u98de\u673a-\u5230\u8fbe\u65f6\u95f4: string, \u98de\u673a-\u65e5\u671f: string, \u98de\u673a-\u76ee\u7684\u5730: string, \u98de\u673a-\u7968\u4ef7: string, \u98de\u673a-\u822a\u73ed\u4fe1\u606f: string, \u98de\u673a-\u8231\u4f4d\u6863\u6b21: string, \u98de\u673a-\u8d77\u98de\u65f6\u95f4: string, \u9910\u5385-\u4eba\u5747\u6d88\u8d39: string, \u9910\u5385-\u4ef7\u4f4d: string, \u9910\u5385-\u533a\u57df: string, \u9910\u5385-\u540d\u79f0: string, \u9910\u5385-\u5730\u5740: string, \u9910\u5385-\u63a8\u8350\u83dc: string, \u9910\u5385-\u662f\u5426\u5730\u94c1\u76f4\u8fbe: string, \u9910\u5385-\u7535\u8bdd\u53f7\u7801: string, \u9910\u5385-\u83dc\u7cfb: string, \u9910\u5385-\u8425\u4e1a\u65f6\u95f4: string, \u9910\u5385-\u8bc4\u5206: string>\r\nto\r\n{'\u65c5\u6e38\u666f\u70b9-\u540d\u79f0': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u533a\u57df': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u666f\u70b9\u7c7b\u578b': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u6700\u9002\u5408\u4eba\u7fa4': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u6d88\u8d39': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u662f\u5426\u5730\u94c1\u76f4\u8fbe': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u95e8\u7968\u4ef7\u683c': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u5730\u5740': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u8bc4\u5206': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u5f00\u653e\u65f6\u95f4': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u7279\u70b9': Value(dtype='string', id=None), '\u9910\u5385-\u540d\u79f0': Value(dtype='string', id=None), '\u9910\u5385-\u533a\u57df': Value(dtype='string', id=None), '\u9910\u5385-\u83dc\u7cfb': Value(dtype='string', id=None), '\u9910\u5385-\u4ef7\u4f4d': Value(dtype='string', id=None), '\u9910\u5385-\u662f\u5426\u5730\u94c1\u76f4\u8fbe': Value(dtype='string', id=None), '\u9910\u5385-\u4eba\u5747\u6d88\u8d39': Value(dtype='string', id=None), '\u9910\u5385-\u5730\u5740': Value(dtype='string', id=None), '\u9910\u5385-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u9910\u5385-\u8bc4\u5206': Value(dtype='string', id=None), '\u9910\u5385-\u8425\u4e1a\u65f6\u95f4': Value(dtype='string', id=None), '\u9910\u5385-\u63a8\u8350\u83dc': Value(dtype='string', id=None), '\u9152\u5e97-\u540d\u79f0': Value(dtype='string', id=None), '\u9152\u5e97-\u533a\u57df': Value(dtype='string', id=None), '\u9152\u5e97-\u661f\u7ea7': Value(dtype='string', id=None), 
'\u9152\u5e97-\u4ef7\u4f4d': Value(dtype='string', id=None), '\u9152\u5e97-\u9152\u5e97\u7c7b\u578b': Value(dtype='string', id=None), '\u9152\u5e97-\u623f\u578b': Value(dtype='string', id=None), '\u9152\u5e97-\u505c\u8f66\u573a': Value(dtype='string', id=None), '\u9152\u5e97-\u623f\u8d39': Value(dtype='string', id=None), '\u9152\u5e97-\u5730\u5740': Value(dtype='string', id=None), '\u9152\u5e97-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u9152\u5e97-\u8bc4\u5206': Value(dtype='string', id=None), '\u7535\u8111-\u54c1\u724c': Value(dtype='string', id=None), '\u7535\u8111-\u4ea7\u54c1\u7c7b\u522b': Value(dtype='string', id=None), '\u7535\u8111-\u5206\u7c7b': Value(dtype='string', id=None), '\u7535\u8111-\u5185\u5b58\u5bb9\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u5c4f\u5e55\u5c3a\u5bf8': Value(dtype='string', id=None), '\u7535\u8111-CPU': Value(dtype='string', id=None), '\u7535\u8111-\u4ef7\u683c\u533a\u95f4': Value(dtype='string', id=None), '\u7535\u8111-\u7cfb\u5217': Value(dtype='string', id=None), '\u7535\u8111-\u5546\u54c1\u540d\u79f0': Value(dtype='string', id=None), '\u7535\u8111-\u7cfb\u7edf': Value(dtype='string', id=None), '\u7535\u8111-\u6e38\u620f\u6027\u80fd': Value(dtype='string', id=None), '\u7535\u8111-CPU\u578b\u53f7': Value(dtype='string', id=None), '\u7535\u8111-\u88f8\u673a\u91cd\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u663e\u5361\u7c7b\u522b': Value(dtype='string', id=None), '\u7535\u8111-\u663e\u5361\u578b\u53f7': Value(dtype='string', id=None), '\u7535\u8111-\u7279\u6027': Value(dtype='string', id=None), '\u7535\u8111-\u8272\u7cfb': Value(dtype='string', id=None), '\u7535\u8111-\u5f85\u673a\u65f6\u957f': Value(dtype='string', id=None), '\u7535\u8111-\u786c\u76d8\u5bb9\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u4ef7\u683c': Value(dtype='string', id=None), '\u706b\u8f66-\u51fa\u53d1\u5730': Value(dtype='string', id=None), '\u706b\u8f66-\u76ee\u7684\u5730': Value(dtype='string', id=None), '\u706b\u8f66-\u65e5\u671f': Value(dtype='string', id=None), '\u706b\u8f66-\u8f66\u578b': Value(dtype='string', id=None), '\u706b\u8f66-\u5750\u5e2d': Value(dtype='string', id=None), '\u706b\u8f66-\u8f66\u6b21\u4fe1\u606f': Value(dtype='string', id=None), '\u706b\u8f66-\u65f6\u957f': Value(dtype='string', id=None), '\u706b\u8f66-\u51fa\u53d1\u65f6\u95f4': Value(dtype='string', id=None), '\u706b\u8f66-\u5230\u8fbe\u65f6\u95f4': Value(dtype='string', id=None), '\u706b\u8f66-\u7968\u4ef7': Value(dtype='string', id=None), '\u98de\u673a-\u51fa\u53d1\u5730': Value(dtype='string', id=None), '\u98de\u673a-\u76ee\u7684\u5730': Value(dtype='string', id=None), '\u98de\u673a-\u65e5\u671f': Value(dtype='string', id=None), '\u98de\u673a-\u8231\u4f4d\u6863\u6b21': Value(dtype='string', id=None), '\u98de\u673a-\u822a\u73ed\u4fe1\u606f': Value(dtype='string', id=None), '\u98de\u673a-\u8d77\u98de\u65f6\u95f4': Value(dtype='string', id=None), '\u98de\u673a-\u5230\u8fbe\u65f6\u95f4': Value(dtype='string', id=None), '\u98de\u673a-\u7968\u4ef7': Value(dtype='string', id=None), '\u98de\u673a-\u51c6\u70b9\u7387': Value(dtype='string', id=None), '\u5929\u6c14-\u57ce\u5e02': Value(dtype='string', id=None), '\u5929\u6c14-\u65e5\u671f': Value(dtype='string', id=None), '\u5929\u6c14-\u5929\u6c14': Value(dtype='string', id=None), '\u5929\u6c14-\u6e29\u5ea6': Value(dtype='string', id=None), '\u5929\u6c14-\u98ce\u529b\u98ce\u5411': Value(dtype='string', id=None), '\u5929\u6c14-\u7d2b\u5916\u7ebf\u5f3a\u5ea6': Value(dtype='string', id=None), 
'\u7535\u5f71-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a': Value(dtype='string', id=None), '\u7535\u5f71-\u7c7b\u578b': Value(dtype='string', id=None), '\u7535\u5f71-\u5e74\u4ee3': Value(dtype='string', id=None), '\u7535\u5f71-\u4e3b\u6f14': Value(dtype='string', id=None), '\u7535\u5f71-\u5bfc\u6f14': Value(dtype='string', id=None), '\u7535\u5f71-\u7247\u540d': Value(dtype='string', id=None), '\u7535\u5f71-\u4e3b\u6f14\u540d\u5355': Value(dtype='string', id=None), '\u7535\u5f71-\u5177\u4f53\u4e0a\u6620\u65f6\u95f4': Value(dtype='string', id=None), '\u7535\u5f71-\u7247\u957f': Value(dtype='string', id=None), '\u7535\u5f71-\u8c46\u74e3\u8bc4\u5206': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u7c7b\u578b': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5e74\u4ee3': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u4e3b\u6f14': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5bfc\u6f14': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u7247\u540d': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u4e3b\u6f14\u540d\u5355': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u9996\u64ad\u65f6\u95f4': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u96c6\u6570': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5355\u96c6\u7247\u957f': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u8c46\u74e3\u8bc4\u5206': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u73ed\u53f7': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u96be\u5ea6': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u79d1\u76ee': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u5e74\u7ea7': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u533a\u57df': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6821\u533a': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65b9\u5f0f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u5f00\u59cb\u65e5\u671f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u7ed3\u675f\u65e5\u671f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6bcf\u5468': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65f6\u95f4': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0b\u8bfe\u65f6\u95f4': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u65f6\u6bb5': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u6b21': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u65f6': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5ba4\u5730\u70b9': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5e08': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4ef7\u683c': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u7a0b\u7f51\u5740': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5e08\u7f51\u5740': Value(dtype='string', id=None), '\u6c7d\u8f66-\u540d\u79f0': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u578b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u7ea7\u522b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u4f4d\u6570': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u8eab\u5c3a\u5bf8(mm)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5382\u5546': Value(dtype='string', id=None), '\u6c7d\u8f66-\u80fd\u6e90\u7c7b\u578b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u53d1\u52a8\u673a\u6392\u91cf(L)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u53d1\u52a8\u673a\u9a6c\u529b(Ps)': Value(dtype='string', 
id=None), '\u6c7d\u8f66-\u9a71\u52a8\u65b9\u5f0f': Value(dtype='string', id=None), '\u6c7d\u8f66-\u7efc\u5408\u6cb9\u8017(L\/100km)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u73af\u4fdd\u6807\u51c6': Value(dtype='string', id=None), '\u6c7d\u8f66-\u9a7e\u9a76\u8f85\u52a9\u5f71\u50cf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5de1\u822a\u7cfb\u7edf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u4ef7\u683c(\u4e07\u5143)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u7cfb': Value(dtype='string', id=None), '\u6c7d\u8f66-\u52a8\u529b\u6c34\u5e73': Value(dtype='string', id=None), '\u6c7d\u8f66-\u6cb9\u8017\u6c34\u5e73': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5012\u8f66\u5f71\u50cf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5b9a\u901f\u5de1\u822a': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u6905\u52a0\u70ed': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u6905\u901a\u98ce': Value(dtype='string', id=None), '\u6c7d\u8f66-\u6240\u5c5e\u4ef7\u683c\u533a\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u540d\u79f0': Value(dtype='string', id=None), '\u533b\u9662-\u7b49\u7ea7': Value(dtype='string', id=None), '\u533b\u9662-\u7c7b\u522b': Value(dtype='string', id=None), '\u533b\u9662-\u6027\u8d28': Value(dtype='string', id=None), '\u533b\u9662-\u533a\u57df': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u5740': Value(dtype='string', id=None), '\u533b\u9662-\u7535\u8bdd': Value(dtype='string', id=None), '\u533b\u9662-\u6302\u53f7\u65f6\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u95e8\u8bca\u65f6\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u516c\u4ea4\u7ebf\u8def': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u94c1\u53ef\u8fbe': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u94c1\u7ebf\u8def': Value(dtype='string', id=None), '\u533b\u9662-\u91cd\u70b9\u79d1\u5ba4': Value(dtype='string', id=None), '\u533b\u9662-CT': Value(dtype='string', id=None), '\u533b\u9662-3.0T MRI': Value(dtype='string', id=None), '\u533b\u9662-DSA': Value(dtype='string', id=None)}\r\n```\r\n\r\n<\/details>\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.1\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3636","id":1115362702,"node_id":"PR_kwDODunzps4xohMB","number":3636,"title":"Update 
index.rst","user":{"login":"VioletteLepercq","id":95622912,"node_id":"U_kgDOBbMXAA","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/95622912?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VioletteLepercq","html_url":"https:\/\/github.com\/VioletteLepercq","followers_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/followers","following_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/orgs","repos_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/repos","events_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-26T18:43:09Z","updated_at":"2022-01-26T18:44:55Z","closed_at":"2022-01-26T18:44:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3636","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3636","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3636.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3636.patch","merged_at":"2022-01-26T18:44:54Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3635","id":1115333219,"node_id":"PR_kwDODunzps4xobAe","number":3635,"title":"Make `ted_talks_iwslt` dataset 
streamable","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-26T18:07:56Z","updated_at":"2022-01-27T13:40:55Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3635","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3635","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3635.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3635.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3634","id":1115133279,"node_id":"I_kwDODunzps5Cd5Vf","number":3634,"title":"Dataset.shuffle(seed=None) gives fixed row 
permutation","user":{"login":"elisno","id":18127060,"node_id":"MDQ6VXNlcjE4MTI3MDYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18127060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/elisno","html_url":"https:\/\/github.com\/elisno","followers_url":"https:\/\/api.github.com\/users\/elisno\/followers","following_url":"https:\/\/api.github.com\/users\/elisno\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/elisno\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/elisno\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/elisno\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/elisno\/orgs","repos_url":"https:\/\/api.github.com\/users\/elisno\/repos","events_url":"https:\/\/api.github.com\/users\/elisno\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/elisno\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-01-26T15:13:08Z","updated_at":"2022-01-27T18:16:07Z","closed_at":"2022-01-27T18:16:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nRepeated attempts to `shuffle` a dataset without specifying a 
seed give the same results.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\n\r\n# Some toy example\r\ndata = datasets.Dataset.from_dict(\r\n {\"feature\": [1, 2, 3, 4, 5], \"label\": [\"a\", \"b\", \"c\", \"d\", \"e\"]}\r\n)\r\n\r\n# Doesn't work as expected\r\nprint(\"Shuffle dataset\")\r\nfor _ in range(3):\r\n print(data.shuffle(seed=None)[:])\r\n\r\n# This seems to work with pandas\r\nprint(\"\\nShuffle via pandas\")\r\nfor _ in range(3):\r\n df = data.to_pandas().sample(frac=1.0)\r\n print(datasets.Dataset.from_pandas(df, preserve_index=False)[:])\r\n\r\n```\r\n\r\n## Expected results\r\nI assumed that the default setting would initialize a new\/random state of a `np.random.BitGenerator` (see [docs](https:\/\/huggingface.co\/docs\/datasets\/package_reference\/main_classes.html?highlight=shuffle#datasets.Dataset.shuffle)).\r\n\r\nWouldn't that reshuffle the rows each time I call `data.shuffle()`?\r\n\r\n## Actual results\r\n\r\n```bash\r\nShuffle dataset\r\n{'feature': [5, 1, 3, 2, 4], 'label': ['e', 'a', 'c', 'b', 'd']}\r\n{'feature': [5, 1, 3, 2, 4], 'label': ['e', 'a', 'c', 'b', 'd']}\r\n{'feature': [5, 1, 3, 2, 4], 'label': ['e', 'a', 'c', 'b', 'd']}\r\n\r\nShuffle via pandas\r\n{'feature': [4, 2, 3, 1, 5], 'label': ['d', 'b', 'c', 'a', 'e']}\r\n{'feature': [2, 5, 3, 4, 1], 'label': ['b', 'e', 'c', 'd', 'a']}\r\n{'feature': [5, 2, 3, 1, 4], 'label': ['e', 'b', 'c', 'a', 'd']}\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.0\r\n- Platform: Linux-5.13.0-27-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.12\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3633","id":1115040174,"node_id":"PR_kwDODunzps4xng6E","number":3633,"title":"Mirror canonical datasets in 
prod","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-26T13:49:37Z","updated_at":"2022-01-26T13:56:21Z","closed_at":"2022-01-26T13:56:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3633","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3633","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3633.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3633.patch","merged_at":"2022-01-26T13:56:21Z"},"body":"Push the datasets changes to the Hub in production by setting `HF_USE_PROD=1`\r\n\r\nI also added a fix that makes the script ignore the json, csv, text, parquet and pandas dataset builders.\r\n\r\ncc @SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3632","id":1115027185,"node_id":"I_kwDODunzps5Cdfbx","number":3632,"title":"Adding CC-100: Monolingual Datasets from Web Crawl Data (Datasets links are 
invalid)","user":{"login":"AnzorGozalishvili","id":55232459,"node_id":"MDQ6VXNlcjU1MjMyNDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55232459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AnzorGozalishvili","html_url":"https:\/\/github.com\/AnzorGozalishvili","followers_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/followers","following_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/orgs","repos_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/repos","events_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-01-26T13:35:37Z","updated_
at":"2022-02-10T06:58:11Z","closed_at":"2022-02-10T06:58:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe dataset links are no longer valid for CC-100. It seems that the website which was keeping these files are no longer accessible and therefore this dataset became unusable. \r\nCheck out the dataset [homepage](http:\/\/data.statmt.org\/cc-100\/) which isn't accessible.\r\nAlso the URLs for dataset file per language isn't accessible: http:\/\/data.statmt.org\/cc-100\/.txt.xz (language codes: am, sr, ka, etc.)\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"cc100\", \"ka\")\r\n```\r\nIt throws 503 error.\r\n\r\n## Expected results\r\nIt should successfully download and load dataset but it throws an exception because the dataset files are no longer accessible.\r\n\r\n\r\n## Environment info\r\nRun from google colab. Just installed the library using pip:\r\n```!pip install -U datasets```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3631","id":1114833662,"node_id":"I_kwDODunzps5CcwL-","number":3631,"title":"Labels conflict when loading a local CSV file.","user":{"login":"pichljan","id":8571301,"node_id":"MDQ6VXNlcjg1NzEzMDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8571301?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pichljan","html_url":"https:\/\/github.com\/pichljan","followers_url":"https:\/\/api.github.com\/users\/pichljan\/followers","following_url":"https:\/\/api.github.com\/users\/pichljan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pichljan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pichljan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pichljan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pichljan\/orgs","repos_url":"https:\/\/api.github.com\/users\/pichljan\/repos","events_url":"https:\/\/api.github.com\/users\/pichljan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pichljan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-26T10:00:33Z","updated_at":"2022-02-11T23:02:31Z","closed_at":"2022-02-11T23:02:31Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to load a local CSV file with a separate file containing label names. It is successfully loaded for the first time, but when I try to load it again, there is a conflict between provided labels and the cached dataset info. 
Disabling caching globally and\/or using `download_mode=\"force_redownload\"` did not help.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset('csv', data_files='data\/my_data.csv',\r\n features=Features(text=Value(dtype='string'),\r\n label=ClassLabel(names_file='data\/my_data_labels.txt')))\r\n```\r\n`my_data.csv` file has the following structure:\r\n```\r\ntext,label\r\n\"example1\",0\r\n\"example2\",1\r\n...\r\n```\r\nand the `my_data_labels.txt` looks like this:\r\n```\r\nlabel1\r\nlabel2\r\n...\r\n```\r\n\r\n## Expected results\r\nSuccessfully loaded dataset.\r\n\r\n## Actual results\r\n```python\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1706, in load_dataset\r\n ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications, in_memory=keep_in_memory)\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 766, in as_dataset\r\n datasets = utils.map_nested(\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 261, in map_nested\r\n mapped = [\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 262, in \r\n _single_map_nested((function, obj, types, None, True))\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 197, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 797, in _build_single_dataset\r\n ds = self._as_dataset(\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 872, in _as_dataset\r\n return Dataset(fingerprint=fingerprint, **dataset_kwargs)\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 638, in __init__\r\n inferred_features = Features.from_arrow_schema(arrow_table.schema)\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1242, in from_arrow_schema\r\n return Features.from_dict(metadata[\"info\"][\"features\"])\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1271, in from_dict\r\n obj = generate_from_dict(dic)\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1076, in generate_from_dict\r\n return {key: generate_from_dict(value) for key, value in obj.items()}\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1076, in \r\n return {key: generate_from_dict(value) for key, value in obj.items()}\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1083, in generate_from_dict\r\n return class_type(**{k: v for k, v in obj.items() if k in field_names})\r\n File \"\", line 7, in __init__\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 776, in __post_init__\r\n raise ValueError(\"Please provide either names or names_file but not both.\")\r\nValueError: Please provide either names or names_file but not both.\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.18.0\r\n- Python version: 
3.8.2\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3630","id":1114578625,"node_id":"I_kwDODunzps5Cbx7B","number":3630,"title":"DuplicatedKeysError of NewsQA dataset","user":{"login":"StevenTang1998","id":37647985,"node_id":"MDQ6VXNlcjM3NjQ3OTg1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37647985?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/StevenTang1998","html_url":"https:\/\/github.com\/StevenTang1998","followers_url":"https:\/\/api.github.com\/users\/StevenTang1998\/followers","following_url":"https:\/\/api.github.com\/users\/StevenTang1998\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/StevenTang1998\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/StevenTang1998\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/StevenTang1998\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/StevenTang1998\/orgs","repos_url":"https:\/\/api.github.com\/users\/StevenTang1998\/repos","events_url":"https:\/\/api.github.com\/users\/StevenTang1998\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/StevenTang1998\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-26T03:05:49Z","updated_at":"2022-02-10T09:59:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"After processing the dataset following official [NewsQA](https:\/\/github.com\/Maluuba\/newsqa), I used datasets to load it:\r\n```\r\na = load_dataset('newsqa', data_dir='news')\r\n```\r\nand the following error occurred: \r\n```\r\nUsing custom data configuration default-data_dir=news\r\nDownloading and preparing dataset newsqa\/default to \/root\/.cache\/huggingface\/datasets\/newsqa\/default-data_dir=news\/1.0.0\/b0b23e22d94a3d352ad9d75aff2b71375264a122fae301463079ee8595e05ab9...\r\nTraceback (most recent call last): \r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/builder.py\", line 1084, in _prepare_split\r\n writer.write(example, key)\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/arrow_writer.py\", line 442, in write\r\n self.check_duplicate_keys()\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/arrow_writer.py\", line 453, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: .\/cnn\/stories\/6a0f9c8a5d0c6e8949b37924163c92923fe5770d.story\r\nKeys should be unique and deterministic in 
nature\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/load.py\", line 1694, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/builder.py\", line 595, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/builder.py\", line 684, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/builder.py\", line 1086, in _prepare_split\r\n num_examples, num_bytes = writer.finalize()\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/arrow_writer.py\", line 524, in finalize\r\n self.check_duplicate_keys()\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/arrow_writer.py\", line 453, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: .\/cnn\/stories\/6a0f9c8a5d0c6e8949b37924163c92923fe5770d.story\r\nKeys should be unique and deterministic in nature\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3629","id":1113971575,"node_id":"PR_kwDODunzps4xkCZA","number":3629,"title":"Fix Hub repos update when there's a new 
release","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-25T14:39:45Z","updated_at":"2022-01-25T14:55:46Z","closed_at":"2022-01-25T14:55:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3629","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3629","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3629.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3629.patch","merged_at":"2022-01-25T14:55:46Z"},"body":"It was not listing the full list of datasets correctly\r\n\r\ncc @SBrandeis this is why it failed for 1.18.0\r\n\r\nWe should be good now !","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3628","id":1113930644,"node_id":"I_kwDODunzps5CZTuU","number":3628,"title":"Dataset Card Creator drops information for \"Additional Information\" 
Section","user":{"login":"dennlinger","id":26013491,"node_id":"MDQ6VXNlcjI2MDEzNDkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26013491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dennlinger","html_url":"https:\/\/github.com\/dennlinger","followers_url":"https:\/\/api.github.com\/users\/dennlinger\/followers","following_url":"https:\/\/api.github.com\/users\/dennlinger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dennlinger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dennlinger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dennlinger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dennlinger\/orgs","repos_url":"https:\/\/api.github.com\/users\/dennlinger\/repos","events_url":"https:\/\/api.github.com\/users\/dennlinger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dennlinger\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-25T14:06:17Z","updated_at":"2022-01-25T14:09:01Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"First of all, the card creator is a great addition and really helpful for streamlining dataset cards!\r\n\r\n## Describe the bug\r\nI encountered an inconvenient bug when entering \"Additional Information\" in the react app, which drops already entered text when switching to a previous section, and then back again to \"Additional Information\". I was able to reproduce the issue in both Firefox and Chrome, so I suspect a problem with the React logic that doesn't expect users to switch back in the final section. \r\nEdit: I'm also not sure whether this is the right place to open the bug report on, since it's not clear to me which particular project it belongs to, or where I could find associated source code.\r\n\r\n## Steps to reproduce the bug\r\n\r\n1. Navigate to the Section \"Additional Information\" in the [dataset card creator](https:\/\/huggingface.co\/datasets\/card-creator\/)\r\n2. Enter text in an arbitrary field, e.g., \"Dataset Curators\".\r\n3. Switch back to a previous section, like \"Dataset Creation\".\r\n4. 
When switching back again to \"Additional Information\", the text has been deleted.\r\n\r\nNotably, this behavior can be reproduced again and again, it's not just problematic for the first \"switch-back\" from Additional Information.\r\n\r\n## Expected results\r\nFor step 4, the previously entered information should still be present in the boxes, similar to the behavior to all other sections (switching back there works as expected)\r\n\r\n## Actual results\r\nThe text boxes are empty again, and previously entered text got deleted.\r\n\r\n## Environment info\r\n- `datasets` version: N\/A\r\n- Platform: Firefox 96.0 \/ Chrome 97.0\r\n- Python version: N\/A\r\n- PyArrow version: N\/A\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3627","id":1113556837,"node_id":"PR_kwDODunzps4xitGe","number":3627,"title":"Fix host URL in The Pile datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-25T08:11:28Z","updated_at":"2022-02-12T12:59:17Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3627","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3627","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3627.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3627.patch","merged_at":null},"body":"This PR fixes the host URL in The Pile datasets, once they have mirrored their data in another server.\r\n\r\nFix 
#3626.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3626","id":1113534436,"node_id":"I_kwDODunzps5CXy_k","number":3626,"title":"The Pile cannot connect to host","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-25T07:43:33Z","updated_at":"2022-01-25T07:43:34Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe Pile had issues with their previous host server and have mirrored its content to another server.\r\n\r\nThe new URL server should be updated.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3625","id":1113017522,"node_id":"I_kwDODunzps5CV0yy","number":3625,"title":"Add a metadata field for when source data was produced 
","user":{"login":"davanstrien","id":8995957,"node_id":"MDQ6VXNlcjg5OTU5NTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8995957?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/davanstrien","html_url":"https:\/\/github.com\/davanstrien","followers_url":"https:\/\/api.github.com\/users\/davanstrien\/followers","following_url":"https:\/\/api.github.com\/users\/davanstrien\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/davanstrien\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/davanstrien\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/davanstrien\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/davanstrien\/orgs","repos_url":"https:\/\/api.github.com\/users\/davanstrien\/repos","events_url":"https:\/\/api.github.com\/users\/davanstrien\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/davanstrien\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-24T18:52:39Z","updated_at":"2022-01-27T18:13:06Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nThe current problem is that information about when source data was produced is not easily visible. Though there are a variety of metadata fields available in the dataset viewer, time period information is not included. This feature request suggests making metadata relating to the time that the underlying *source* data was produced more prominent and outlines why this specific information is of particular importance, both in domain-specific historic research and more broadly.\r\n\r\n**Describe the solution you'd like**\r\n\r\nThere are a variety of metadata fields exposed in the dataset viewer (license, task categories, etc.) These fields make this metadata more prominent both for human users and as potentially machine-actionable information (for example, through the API). I would propose to add a metadata field that says when some underlying data was produced. For example, a dataset would be labelled as being produced between `1800-1900`. \r\n\r\n**Describe alternatives you've considered**\r\nThis information is sometimes available in the Datacard or a paper describing the dataset. However, it's often not that easy to identify or extract this information, particularly if you want to use this field as a filter to identify relevant datasets. \r\n\r\n**Additional context**\r\n\r\nI believe this feature is relevant for a number of reasons: \r\n- Increasingly, there is an interest in using historical data for training language models (for example, https:\/\/huggingface.co\/dbmdz\/bert-base-historic-dutch-cased), and datasets to support this task (for example, https:\/\/huggingface.co\/datasets\/bnl_newspapers). For these datasets, indicating the time periods covered is particularly relevant. \r\n- More broadly, time is likely a common source of domain drift. Datasets of movie reviews from the 90s may not work well for recent movie reviews. 
As the documentation and long-term management of ML data become more of a priority, quickly understanding the time when the underlying text (or other data types) is arguably more important. \r\n- time-series data: datasets are adding more support for time series data. Again, the periods covered might be particularly relevant here.\r\n\r\n**open questions**\r\n\r\n- I think some of my points above apply not only to the underlying data but also to annotations. As a result, there could also be an argument for encoding this information somewhere. However, I would argue (but could be persuaded otherwise) that this is probably less important for filtering. This type of context is already addressed in the datasheets template and often requires more narrative to discuss. \r\n- what level of granularity would make sense for this? e.g. assigning a decade, century or year?\r\n- how to encode this information? What formatting makes sense \r\n- what specific time to encode; a data range? (mean, modal, min, max value?) \r\n\r\nThis is a slightly amorphous feature request - I would be happy to discuss further\/try and propose a more concrete solution if this seems like something that could be worth considering. I realise this might also touch on other parts of the \ud83e\udd17 hubs ecosystem. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3623","id":1112835239,"node_id":"PR_kwDODunzps4xgWig","number":3623,"title":"Extend support for streaming datasets that use 
os.path.relpath","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-24T16:00:52Z","updated_at":"2022-02-04T14:03:55Z","closed_at":"2022-02-04T14:03:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3623","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3623","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3623.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3623.patch","merged_at":"2022-02-04T14:03:54Z"},"body":"This PR extends the support in streaming mode for datasets that use `os.path.relpath`, by patching that function.\r\n\r\nThis feature will also be useful to yield the relative path of audio or image files, within an archive or parent dir.\r\n\r\nClose #3622.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3622","id":1112831661,"node_id":"I_kwDODunzps5CVHat","number":3622,"title":"Extend support for streaming datasets that use 
os.path.relpath","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-24T15:58:23Z","updated_at":
"2022-02-04T14:03:54Z","closed_at":"2022-02-04T14:03:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Extend support for streaming datasets that use `os.path.relpath`.\r\n\r\nThis feature will also be useful to yield the relative path of audio or image files.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3621","id":1112720434,"node_id":"I_kwDODunzps5CUsQy","number":3621,"title":"Consider adding `ipywidgets` as a dependency.","user":{"login":"koaning","id":1019791,"node_id":"MDQ6VXNlcjEwMTk3OTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1019791?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/koaning","html_url":"https:\/\/github.com\/koaning","followers_url":"https:\/\/api.github.com\/users\/koaning\/followers","following_url":"https:\/\/api.github.com\/users\/koaning\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/koaning\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/koaning\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/koaning\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/koaning\/orgs","repos_url":"https:\/\/api.github.com\/users\/koaning\/repos","events_url":"https:\/\/api.github.com\/users\/koaning\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/koaning\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-24T14:27:11Z","updated_at":"2022-01-24T15:14:15Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I install `datasets` in a fresh virtualenv with jupyterlab I always see this error. \r\n\r\n```\r\nImportError: IProgress not found. Please update jupyter and ipywidgets. See https:\/\/ipywidgets.readthedocs.io\/en\/stable\/user_install.html\r\n```\r\n\r\nIt's a bit of a nuisance, because I need to run shut down the jupyterlab server in order to install the required dependency. Might it be an option to just include it as a dependency here? 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3620","id":1112677252,"node_id":"PR_kwDODunzps4xf1J3","number":3620,"title":"Add Fon language tag","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-24T13:52:26Z","updated_at":"2022-02-04T14:04:36Z","closed_at":"2022-02-04T14:04:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3620","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3620","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3620.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3620.patch","merged_at":"2022-02-04T14:04:35Z"},"body":"Add Fon language tag to resources.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3619","id":1112611415,"node_id":"PR_kwDODunzps4xfnCQ","number":3619,"title":"fix meta in mls","user":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-24T12:54:38Z","updated_at":"2022-01-24T20:53:22Z","closed_at":"2022-01-24T20:53:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3619","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3619","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3619.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3619.patch","merged_at":"2022-01-24T20:53:21Z"},"body":"`monolingual` value of `m ultilinguality` param in yaml meta was changed to `multilingual` :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3618","id":1112123365,"node_id":"I_kwDODunzps5CSafl","number":3618,"title":"TIMIT Dataset not working with 
GPU","user":{"login":"TheSeamau5","id":3227869,"node_id":"MDQ6VXNlcjMyMjc4Njk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3227869?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TheSeamau5","html_url":"https:\/\/github.com\/TheSeamau5","followers_url":"https:\/\/api.github.com\/users\/TheSeamau5\/followers","following_url":"https:\/\/api.github.com\/users\/TheSeamau5\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TheSeamau5\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TheSeamau5\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TheSeamau5\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TheSeamau5\/orgs","repos_url":"https:\/\/api.github.com\/users\/TheSeamau5\/repos","events_url":"https:\/\/api.github.com\/users\/TheSeamau5\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TheSeamau5\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2022-01-24T03:26:03Z","updated_at":"2022-01-27T13:17:51Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am working trying to use the TIMIT dataset in order to fine-tune Wav2Vec2 model and I am unable to load the \"audio\" column from the dataset when working with a GPU. \r\n\r\nI am working on Amazon Sagemaker Studio, on the Python 3 (PyTorch 1.8 Python 3.6 GPU Optimized) environment, with a single ml.g4dn.xlarge instance (corresponds to a Tesla T4 GPU). \r\n\r\nI don't know if the issue is GPU related or Python environment related because everything works when I work off of the CPU Optimized environment with a non-GPU instance. My code also works on Google Colab with a GPU instance. \r\n\r\nThis issue is blocking because I cannot get the 'audio' column in any way due to this error, which means that I can't pass it to any functions. I later use the dataset.map function and that is where I originally noticed this error. 
\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ntimit_train = load_dataset('timit_asr', split='train')\r\nprint(timit_train['audio'])\r\n```\r\n\r\n## Expected results\r\nExpected to see inside the 'audio' column, which contains an 'array' nested field with the array data I actually need.\r\n\r\n## Actual results\r\n\r\nTraceback\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n----> 1 timit_train['audio']\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py in __getitem__(self, key)\r\n 1917 \"\"\"Can be used to index columns (by string names) or rows (by integer index or iterable of indices or bools).\"\"\"\r\n 1918 return self._getitem(\r\n-> 1919 key,\r\n 1920 )\r\n 1921 \r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py in _getitem(self, key, decoded, **kwargs)\r\n 1902 pa_subtable = query_table(self._data, key, indices=self._indices if self._indices is not None else None)\r\n 1903 formatted_output = format_table(\r\n-> 1904 pa_subtable, key, formatter=formatter, format_columns=format_columns, output_all_columns=output_all_columns\r\n 1905 )\r\n 1906 return formatted_output\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/formatting\/formatting.py in format_table(table, key, formatter, format_columns, output_all_columns)\r\n 529 python_formatter = PythonFormatter(features=None)\r\n 530 if format_columns is None:\r\n--> 531 return formatter(pa_table, query_type=query_type)\r\n 532 elif query_type == \"column\":\r\n 533 if key in format_columns:\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/formatting\/formatting.py in __call__(self, pa_table, query_type)\r\n 280 return self.format_row(pa_table)\r\n 281 elif query_type == \"column\":\r\n--> 282 return self.format_column(pa_table)\r\n 283 elif query_type == \"batch\":\r\n 284 return self.format_batch(pa_table)\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/formatting\/formatting.py in format_column(self, pa_table)\r\n 315 column = self.python_arrow_extractor().extract_column(pa_table)\r\n 316 if self.decoded:\r\n--> 317 column = self.python_features_decoder.decode_column(column, pa_table.column_names[0])\r\n 318 return column\r\n 319 \r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/formatting\/formatting.py in decode_column(self, column, column_name)\r\n 221 \r\n 222 def decode_column(self, column: list, column_name: str) -> list:\r\n--> 223 return self.features.decode_column(column, column_name) if self.features else column\r\n 224 \r\n 225 def decode_batch(self, batch: dict) -> dict:\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/features\/features.py in decode_column(self, column, column_name)\r\n 1337 return (\r\n 1338 [self[column_name].decode_example(value) if value is not None else None for value in column]\r\n-> 1339 if self._column_requires_decoding[column_name]\r\n 1340 else column\r\n 1341 )\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/features\/features.py in (.0)\r\n 1336 \"\"\"\r\n 1337 return (\r\n-> 1338 [self[column_name].decode_example(value) if value is not None else None for value in column]\r\n 1339 if self._column_requires_decoding[column_name]\r\n 1340 else column\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/features\/audio.py in decode_example(self, value)\r\n 85 dict\r\n 86 \"\"\"\r\n---> 87 path, file = 
(value[\"path\"], BytesIO(value[\"bytes\"])) if value[\"bytes\"] is not None else (value[\"path\"], None)\r\n 88 if path is None and file is None:\r\n 89 raise ValueError(f\"An audio sample should have one of 'path' or 'bytes' but both are None in {value}.\")\r\n\r\nTypeError: string indices must be integers\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.0\r\n- Platform: Linux-4.14.256-197.484.amzn2.x86_64-x86_64-with-debian-buster-sid\r\n- Python version: 3.6.13\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3617","id":1111938691,"node_id":"PR_kwDODunzps4xdb8K","number":3617,"title":"PR for the CFPB Consumer Complaints dataset","user":{"login":"kayvane1","id":42403093,"node_id":"MDQ6VXNlcjQyNDAzMDkz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42403093?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kayvane1","html_url":"https:\/\/github.com\/kayvane1","followers_url":"https:\/\/api.github.com\/users\/kayvane1\/followers","following_url":"https:\/\/api.github.com\/users\/kayvane1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kayvane1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kayvane1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kayvane1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kayvane1\/orgs","repos_url":"https:\/\/api.github.com\/users\/kayvane1\/repos","events_url":"https:\/\/api.github.com\/users\/kayvane1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kayvane1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2022-01-23T17:47:12Z","updated_at":"2022-02-07T21:08:31Z","closed_at":"2022-02-07T21:08:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3617","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3617","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3617.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3617.patch","merged_at":"2022-02-07T21:08:31Z"},"body":"Think I followed all the steps but please let me know if anything needs changing or any improvements I can make to the code quality","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3616","id":1111587861,"node_id":"PR_kwDODunzps4xcZMD","number":3616,"title":"Make streamable the BnL Historical Newspapers dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-22T14:52:36Z","updated_at":"2022-02-04T14:05:23Z","closed_at":"2022-02-04T14:05:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3616","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3616","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3616.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3616.patch","merged_at":"2022-02-04T14:05:21Z"},"body":"I've refactored the code in order to make the dataset streamable and to avoid it takes too long:\r\n- I've used `iter_files`\r\n\r\nClose #3615 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3615","id":1111576876,"node_id":"I_kwDODunzps5CQVEs","number":3615,"title":"Dataset BnL Historical Newspapers does not work in streaming 
mode","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2022-01-22T14:12:59Z","updated_at":"2022-02-04T14:05:21Z","cl
osed_at":"2022-02-04T14:05:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen trying to load in streaming mode, it \"hangs\"...\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nds = load_dataset(\"bnl_newspapers\", split=\"train\", streaming=True)\r\n```\r\n\r\n## Expected results\r\nThe code should be optimized, so that it works fast in streaming mode.\r\n\r\nCC: @davanstrien \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3614","id":1110736657,"node_id":"PR_kwDODunzps4xZdCe","number":3614,"title":"Minor fixes","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-21T17:48:44Z","updated_at":"2022-01-24T12:45:49Z","closed_at":"2022-01-24T12:45:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3614","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3614","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3614.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3614.patch","merged_at":"2022-01-24T12:45:49Z"},"body":"This PR:\r\n* adds \"desc\" to the `ignore_kwargs` list in `Dataset.filter`\r\n* fixes the default value of `id` in `DatasetDict.prepare_for_task`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3613","id":1110684015,"node_id":"I_kwDODunzps5CM7Fv","number":3613,"title":"Files not updating in dataset viewer","user":{"login":"abidlabs","id":1778297,"node_id":"MDQ6VXNlcjE3NzgyOTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1778297?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abidlabs","html_url":"https:\/\/github.com\/abidlabs","followers_url":"https:\/\/api.github.com\/users\/abidlabs\/followers","following_url":"https:\/\/api.github.com\/users\/abidlabs\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abidlabs\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abidlabs\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abidlabs\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abidlabs\/orgs","repos_url":"https:\/\/api.github.com\/users\/abidlabs\/repos","events_url":"https:\/\/api.github.com\/users\/abidlabs\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abidlabs\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-21T16:47:20Z","updated_at":"2022-01-22T08:13:13Z","closed_at":"2022-01-22T08:13:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*name of the dataset*'\r\n\r\n**Link:**\r\nSome examples:\r\n* https:\/\/huggingface.co\/datasets\/abidlabs\/crowdsourced-speech4\r\n* https:\/\/huggingface.co\/datasets\/abidlabs\/test-audio-13\r\n\r\n*short description of the issue*\r\nIt seems that the dataset viewer is reading a cached version of the dataset and it is not updating to reflect new files that are added to the dataset. I get this error:\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/1778297\/150566660-30dc0dcd-18fd-4471-b70c-7c4bdc6a23c6.png)\r\n\r\n\r\nAm I the one who added this dataset? 
Yes","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3612","id":1110506466,"node_id":"PR_kwDODunzps4xYsvS","number":3612,"title":"wikifix","user":{"login":"apergo-ai","id":68908804,"node_id":"MDQ6VXNlcjY4OTA4ODA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68908804?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/apergo-ai","html_url":"https:\/\/github.com\/apergo-ai","followers_url":"https:\/\/api.github.com\/users\/apergo-ai\/followers","following_url":"https:\/\/api.github.com\/users\/apergo-ai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/apergo-ai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/apergo-ai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/apergo-ai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/apergo-ai\/orgs","repos_url":"https:\/\/api.github.com\/users\/apergo-ai\/repos","events_url":"https:\/\/api.github.com\/users\/apergo-ai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/apergo-ai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-01-21T14:05:11Z","updated_at":"2022-02-03T17:58:16Z","closed_at":"2022-02-03T17:58:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3612","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3612","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3612.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3612.patch","merged_at":null},"body":"This should get the wikipedia dataloading script back up and running - at least I hope so (tested with language ff and ii)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3611","id":1110399096,"node_id":"I_kwDODunzps5CL1h4","number":3611,"title":"Indexing bug after dataset.select()","user":{"login":"kamalkraj","id":17096858,"node_id":"MDQ6VXNlcjE3MDk2ODU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17096858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kamalkraj","html_url":"https:\/\/github.com\/kamalkraj","followers_url":"https:\/\/api.github.com\/users\/kamalkraj\/followers","following_url":"https:\/\/api.github.com\/users\/kamalkraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kamalkraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kamalkraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kamalkraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kamalkraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/kamalkraj\/repos","events_url":"https:\/\/api.github.com\/users\/kamalkraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kamalkraj\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-21T12:09:30Z","updated_at":"2022-01-27T18:16:22Z","closed_at":"2022-01-27T18:16:22Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n \r\nDataset indexing is not working as expected after `dataset.select(range(100))`\r\n\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nimport datasets\r\n\r\ntask_to_keys = {\r\n \"cola\": (\"sentence\", None),\r\n \"mnli\": (\"premise\", \"hypothesis\"),\r\n \"mrpc\": (\"sentence1\", \"sentence2\"),\r\n \"qnli\": (\"question\", \"sentence\"),\r\n \"qqp\": (\"question1\", \"question2\"),\r\n \"rte\": (\"sentence1\", \"sentence2\"),\r\n \"sst2\": (\"sentence\", None),\r\n \"stsb\": (\"sentence1\", \"sentence2\"),\r\n \"wnli\": (\"sentence1\", \"sentence2\"),\r\n}\r\n\r\ntask_name = \"sst2\"\r\nraw_datasets = datasets.load_dataset(\"glue\", task_name)\r\n\r\n\r\ntrain_dataset = raw_datasets[\"train\"]\r\n\r\nprint(\"before select: \",train_dataset[-2:])\r\n# before select: {'sentence': ['a patient viewer ', 'this new jangle of noise , mayhem and stupidity must be a serious contender for the title . 
'], 'label': [1, 0], 'idx': [67347, 67348]}\r\n\r\ntrain_dataset = train_dataset.select(range(100))\r\n\r\nprint(\"after select: \",train_dataset[-2:])\r\n# after select: {'sentence': [], 'label': [], 'idx': []}\r\n\r\n```\r\n\r\nlink to colab: https:\/\/colab.research.google.com\/drive\/1LngeRC9f0jE7eSQ4Kh1cIeb411lRXQD-?usp=sharing\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\nshowing 98, 99 index data\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\nempty\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.17.0\r\n- Platform: Linux-5.4.144+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3710","id":1133955393,"node_id":"PR_kwDODunzps4ymQMQ","number":3710,"title":"Fix CI code quality issue","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-12T12:05:39Z","updated_at":"2022-02-12T12:58:05Z","closed_at":"2022-02-12T12:58:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3710","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3710","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3710.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3710.patch","merged_at":"2022-02-12T12:58:04Z"},"body":"Fix CI code quality issue introduced by #3695.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3710\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3709","id":1132997904,"node_id":"PR_kwDODunzps4yi0J4","number":3709,"title":"Set base path to hub url for canonical 
datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-11T19:23:20Z","updated_at":"2022-02-11T19:23:20Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3709","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3709","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3709.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3709.patch","merged_at":null},"body":"This should allow canonical datasets to use relative paths to download data files from the Hub\r\n\r\ncc @polinaeterna this will be useful if we have audio datasets that are canonical and for which you'd like to host data files","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3709\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3708","id":1132968402,"node_id":"I_kwDODunzps5Dh7nS","number":3708,"title":"Loading JSON gets stuck with many 
workers\/threads","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-11T18:50:48Z","updated_at":"2022-02-11T20:57:53Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nLoading a JSON dataset with `load_dataset` can get stuck when running on a machine with many CPUs. This is especially an issue when loading a large dataset on a large machine. \r\n\r\n\r\n## Steps to reproduce the bug\r\nI originally created the following script to reproduce the issue:\r\n```python\r\nfrom datasets import load_dataset\r\nfrom multiprocessing import Process\r\nfrom tqdm import tqdm\r\nimport datasets\r\nfrom transformers import set_seed\r\n\r\ndef run_tasks_in_parallel(tasks, ds_list):\r\n for _ in tqdm(range(1000)):\r\n print('new batch')\r\n running_tasks = [Process(target=task, args=(ds, i)) for i, (task, ds) in enumerate(zip(tasks, ds_list))]\r\n for running_task in running_tasks:\r\n running_task.start()\r\n for running_task in running_tasks:\r\n running_task.join()\r\n\r\ndef get_dataset():\r\n dataset_name = 'transformersbook\/codeparrot'\r\n ds = load_dataset(dataset_name+'-train', split=\"train\", streaming=True)\r\n ds = ds.shuffle(buffer_size=1000, seed=1)\r\n return iter(ds)\r\n\r\ndef get_next_element(ds, process_id, N=10000):\r\n for _ in range(N):\r\n _ = next(ds)['content']\r\n print(f'process {process_id} done')\r\n return\r\n\r\nset_seed(1)\r\ndatasets.utils.logging.set_verbosity_debug()\r\n\r\nn_processes = 8\r\ntasks = [get_next_element for _ in range(n_processes)]\r\nargs = [get_dataset() for _ in range(n_processes)]\r\nrun_tasks_in_parallel(tasks, args)\r\n```\r\n\r\nToday I noticed that it can happen when running it on a single process on a machine with many cores without streaming. So just `load_dataset(\"transformersbook\/codeparrot-train\")` alone might cause the issue after waiting long enough or trying many times. It's a slightly random process which makes it especially hard to track down. When I encountered it today it had already processed 17GB of data (the size of the cache folder when it got stuck) before getting stuck.\r\n\r\nHere's my current understanding of the error. 
As far as I can tell it happens in the following block: https:\/\/github.com\/huggingface\/datasets\/blob\/be701e9e89ab38022612c7263edc015bc7feaff9\/src\/datasets\/packaged_modules\/json\/json.py#L119-L139\r\n\r\nWhen the try on line 121 fails and the `block_size` is increased it can happen that it can't read the JSON again and gets stuck indefinitely. A hint that points in that direction is that increasing the `chunksize` argument decreases the chance of getting stuck and vice versa. Maybe it is an issue with a lock on the file that is not properly released.\r\n\r\n## Expected results\r\nRead a JSON before the end of the universe.\r\n\r\n## Actual results\r\nRead a JSON not before the end of the universe.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.3\r\n- Platform: Linux-4.19.0-18-cloud-amd64-x86_64-with-glibc2.28\r\n- Python version: 3.9.10\r\n- PyArrow version: 7.0.0\r\n\r\n@lhoestq we dicsussed this a while ago. @albertvillanova we discussed this today :) \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3708\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3707","id":1132741903,"node_id":"I_kwDODunzps5DhEUP","number":3707,"title":"`.select`: unexpected behavior with `indices`","user":{"login":"gabegma","id":36087158,"node_id":"MDQ6VXNlcjM2MDg3MTU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36087158?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gabegma","html_url":"https:\/\/github.com\/gabegma","followers_url":"https:\/\/api.github.com\/users\/gabegma\/followers","following_url":"https:\/\/api.github.com\/users\/gabegma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gabegma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gabegma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gabegma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gabegma\/orgs","repos_url":"https:\/\/api.github.com\/users\/gabegma\/repos","events_url":"https:\/\/api.github.com\/users\/gabegma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gabegma\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-11T15:20:01Z","updated_at":"2022-02-11T20:53:53Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe `.select` method will not throw when sending `indices` bigger than the dataset length; `indices` will be 
wrapped instead. This behavior is not documented anywhere, and is not intuitive. \r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import Dataset\r\nds = Dataset.from_dict({\"text\": [\"d\", \"e\", \"f\"], \"label\": [4, 5, 6]})\r\nres1 = ds.select([1, 2, 3])['text']\r\nres2 = ds.select([1000])['text']\r\n```\r\n\r\n## Expected results\r\nBoth results should throw an `Error`.\r\n\r\n## Actual results\r\n`res1` will give `['e', 'f', 'd']`\r\n`res2` will give `['e']`\r\n\r\n## Environment info\r\nBug found from this environment:\r\n- `datasets` version: 1.16.1\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.7\r\n- PyArrow version: 6.0.1\r\n\r\nIt was also replicated on `master`.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3707\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3706","id":1132218874,"node_id":"I_kwDODunzps5DfEn6","number":3706,"title":"Unable to load dataset 'big_patent'","user":{"login":"ankitk2109","id":26432753,"node_id":"MDQ6VXNlcjI2NDMyNzUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26432753?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ankitk2109","html_url":"https:\/\/github.com\/ankitk2109","followers_url":"https:\/\/api.github.com\/users\/ankitk2109\/followers","following_url":"https:\/\/api.github.com\/users\/ankitk2109\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ankitk2109\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ankitk2109\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ankitk2109\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ankitk2109\/orgs","repos_url":"https:\/\/api.github.com\/users\/ankitk2109\/repos","events_url":"https:\/\/api.github.com\/users\/ankitk2109\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ankitk2109\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-02-11T09:48:34Z","updated_at":"2022-02-11T14:28:20Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nUnable to load the \"big_patent\" dataset\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset('big_patent', 'd', 'validation')\r\n```\r\n\r\n## Expected results\r\nDownload big_patents' validation split from the 'd' subset\r\n\r\n## Getting an error saying:\r\n{FileNotFoundError}Local file 
..\\huggingface\\datasets\\downloads\\6159313604f4f2c01e7d1cac52139343b6c07f73f6de348d09be6213478455c5\\bigPatentData\\train.tar.gz doesn't exist\r\n\r\n## Environment info\r\n\r\n- `datasets` version:1.18.3\r\n- Platform: Windows\r\n- Python version:3.8\r\n- PyArrow version:7.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3706\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3705","id":1132053226,"node_id":"PR_kwDODunzps4yfhyj","number":3705,"title":"Raise informative error when loading a save_to_disk dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-11T08:21:03Z","updated_at":"2022-02-11T22:56:40Z","closed_at":"2022-02-11T22:56:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3705","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3705","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3705.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3705.patch","merged_at":"2022-02-11T22:56:39Z"},"body":"People recurrently report error when trying to load a dataset (using `load_dataset`) that was previously saved using `save_to_disk`.\r\n\r\nThis PR raises an informative error message telling them they should use `load_from_disk` instead.\r\n\r\nClose #3700.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3705\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3704","id":1132042631,"node_id":"I_kwDODunzps5DeZmH","number":3704,"title":"OSCAR-2109 datasets are misaligned and truncated","user":{"login":"adrianeboyd","id":5794899,"node_id":"MDQ6VXNlcjU3OTQ4OTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5794899?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adrianeboyd","html_url":"https:\/\/github.com\/adrianeboyd","followers_url":"https:\/\/api.github.com\/users\/adrianeboyd\/followers","following_url":"https:\/\/api.github.com\/users\/adrianeboyd\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adrianeboyd\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adrianeboyd\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adrianeboyd\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adrianeboyd\/orgs","repos_url":"https:\/\/api.github.com\/users\/adrianeboyd\/repos","events_url":"https:\/\/api.github.com\/users\/adrianeboyd\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adrianeboyd\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-02-11T08:14:59Z","updated_at":"2022-02-11T10:41:41Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nThe `oscar-corpus\/OSCAR-2109` data appears to be misaligned and truncated by the dataset builder for subsets that contain more than one part and for cases where the texts contain non-unix newlines.\r\n\r\n## Steps to reproduce the bug\r\n\r\nA few examples, although I'm not sure how deterministic the particular (mis)alignment is in various configurations:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"oscar-corpus\/OSCAR-2109\", \"deduplicated_fi\", split=\"train\", use_auth_token=True)\r\nentry = dataset[0]\r\n# entry[\"text\"] is from fi_part_3.txt.gz\r\n# entry[\"meta\"] is from fi_meta_part_2.jsonl.gz\r\n\r\ndataset = load_dataset(\"oscar-corpus\/OSCAR-2109\", \"deduplicated_no\", split=\"train\", use_auth_token=True)\r\nentry = dataset[900000]\r\n# entry[\"text\"] is from no_part_3.txt.gz and contains a blank line\r\n# entry[\"meta\"] is from no_meta_part_1.jsonl.gz\r\n\r\ndataset = load_dataset(\"oscar-corpus\/OSCAR-2109\", \"deduplicated_mk\", split=\"train\", streaming=True, use_auth_token=True)\r\n# 9088 texts in the dataset are empty\r\n```\r\n\r\nFor `deduplicated_fi`, all exported raw texts from the dataset are 17GB rather than 20GB as reported in the data splits overview table. 
The token count with `wc -w` for the raw texts is 2,067,556,874 rather than the expected 2,357,264,196 from the data splits table.\r\n\r\nFor `deduplicated_no` all exported raw texts contain 624,040,887 rather than the expected 776,354,517 tokens.\r\n\r\nFor `deduplicated_mk` it is 122,236,936 rather than 134,544,934 tokens. \r\n\r\nI'm not expecting the `wc -w` counts to line up exactly with the data splits table, but for comparison the `wc -w` count for `deduplicated_mk` on the raw texts is 134,545,424.\r\n\r\n## Issues\r\n\r\n* The meta \/ text files are not paired correctly when loading, so the extracted texts do not have the right offsets, the metadata is not associated with the correct text, and the text files may not be processed to the end or may be processed beyond the end (empty texts).\r\n* The line count offset is not reset per file so the texts aren't aligned to the right offsets in any parts beyond the first part, leading to truncation when in effect blank lines are not skipped.\r\n* Non-unix newline characters are treated as newlines when reading the text files while the metadata only counts unix newlines for its line offsets, leading to further misalignments between the metadata and the extracted texts, and which also results in truncation.\r\n\r\n## Expected results\r\n\r\nAll texts from the OSCAR release are extracted according to the metadata and aligned with the correct metadata.\r\n\r\n## Fixes\r\n\r\nNot necessarily the exact fixes\/checks you may want to use (I didn't test all languages or do any cross-platform testing, I'm not sure all the details are compatible with streaming), however to highlight the issues:\r\n\r\n```diff\r\ndiff --git a\/OSCAR-2109.py b\/OSCAR-2109.py\r\nindex bbac1076..5eee8de7 100644\r\n--- a\/OSCAR-2109.py\r\n+++ b\/OSCAR-2109.py\r\n@@ -20,6 +20,7 @@\r\n import collections\r\n import gzip\r\n import json\r\n+import os\r\n \r\n import datasets\r\n \r\n@@ -387,9 +388,20 @@ class Oscar2109(datasets.GeneratorBasedBuilder):\r\n with open(checksum_file, encoding=\"utf-8\") as f:\r\n data_filenames = [line.split()[1] for line in f if line]\r\n data_urls = [self.config.base_data_path + data_filename for data_filename in data_filenames]\r\n- text_files = dl_manager.download([url for url in data_urls if url.endswith(\".txt.gz\")])\r\n- metadata_files = dl_manager.download([url for url in data_urls if url.endswith(\".jsonl.gz\")])\r\n+ # sort filenames so corresponding parts are aligned\r\n+ text_files = sorted(dl_manager.download([url for url in data_urls if url.endswith(\".txt.gz\")]))\r\n+ metadata_files = sorted(dl_manager.download([url for url in data_urls if url.endswith(\".jsonl.gz\")]))\r\n+ assert len(text_files) == len(metadata_files)\r\n metadata_and_text_files = list(zip(metadata_files, text_files))\r\n+ for meta_path, text_path in metadata_and_text_files:\r\n+ # check that meta\/text part numbers are the same\r\n+ if \"part\" in os.path.basename(text_path):\r\n+ assert (\r\n+ os.path.basename(text_path).replace(\".txt.gz\", \"\").split(\"_\")[-1]\r\n+ == os.path.basename(meta_path).replace(\".jsonl.gz\", \"\").split(\"_\")[-1]\r\n+ )\r\n+ else:\r\n+ assert len(metadata_and_text_files) == 1\r\n return [\r\n datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={\"metadata_and_text_files\": metadata_and_text_files}),\r\n ]\r\n@@ -397,10 +409,14 @@ class Oscar2109(datasets.GeneratorBasedBuilder):\r\n def _generate_examples(self, metadata_and_text_files):\r\n \"\"\"This function returns the examples in the raw (text) form by iterating 
on all the files.\"\"\"\r\n id_ = 0\r\n- offset = 0\r\n for meta_path, text_path in metadata_and_text_files:\r\n+ # line offsets are per text file\r\n+ offset = 0\r\n logger.info(\"generating examples from = %s\", text_path)\r\n- with gzip.open(open(text_path, \"rb\"), \"rt\", encoding=\"utf-8\") as text_f:\r\n+ # some texts contain non-Unix newlines that should not be\r\n+ # interpreted as line breaks for the line counts in the metadata\r\n+ # with readline()\r\n+ with gzip.open(open(text_path, \"rb\"), \"rt\", encoding=\"utf-8\", newline=\"\\n\") as text_f:\r\n with gzip.open(open(meta_path, \"rb\"), \"rt\", encoding=\"utf-8\") as meta_f:\r\n for line in meta_f:\r\n # read meta\r\n@@ -411,7 +427,12 @@ class Oscar2109(datasets.GeneratorBasedBuilder):\r\n offset += 1\r\n text_f.readline()\r\n # read text\r\n- text = \"\".join([text_f.readline() for _ in range(meta[\"nb_sentences\"])]).rstrip()\r\n+ text_lines = [text_f.readline() for _ in range(meta[\"nb_sentences\"])]\r\n+ # all lines contain text (no blank lines or EOF)\r\n+ assert all(text_lines)\r\n+ assert \"\\n\" not in text_lines\r\n offset += meta[\"nb_sentences\"]\r\n+ # only strip the trailing newline\r\n+ text = \"\".join(text_lines).rstrip(\"\\n\")\r\n yield id_, {\"id\": id_, \"text\": text, \"meta\": meta}\r\n id_ += 1\r\n```\r\n\r\nI've tested this with a number of smaller deduplicated languages with 1-20 parts and the resulting datasets looked correct in terms of word count and size when compared to the data splits table and raw texts, and the text\/metadata alignments were correct in all my spot checks. However, there are many many languages I didn't test and I'm not sure that there aren't any texts containing blank lines in the corpus, for instance. For the cases I tested, the assertions related to blank lines and EOF made it easier to verify that the text and metadata were aligned as intended, since there would be little chance of spurious alignments of variable-length texts across so much data.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3704\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3703","id":1131882772,"node_id":"I_kwDODunzps5DdykU","number":3703,"title":"ImportError: To be able to use this metric, you need to install the following dependencies['seqeval'] using 'pip install seqeval' for 
instance'","user":{"login":"zhangyifei1","id":28425091,"node_id":"MDQ6VXNlcjI4NDI1MDkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28425091?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zhangyifei1","html_url":"https:\/\/github.com\/zhangyifei1","followers_url":"https:\/\/api.github.com\/users\/zhangyifei1\/followers","following_url":"https:\/\/api.github.com\/users\/zhangyifei1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zhangyifei1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zhangyifei1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zhangyifei1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zhangyifei1\/orgs","repos_url":"https:\/\/api.github.com\/users\/zhangyifei1\/repos","events_url":"https:\/\/api.github.com\/users\/zhangyifei1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zhangyifei1\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-11T06:38:42Z","updated_at":"2022-02-11T06:40:18Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"hi :\r\nI want to use the seqeval indicator because of direct load_ When metric ('seqeval '), it will prompt that the network connection fails. So I downloaded the seqeval Py to load locally. Loading code: metric = load_ metric(path='mymetric\/seqeval\/seqeval.py')\r\n\r\nBut tips:\r\n\r\nTraceback (most recent call last):\r\n File \"\/home\/ubuntu\/Python3.6_project\/zyf_project\/transformers\/examples\/pytorch\/token-classification\/run_ner.py\", line 604, in \r\n main()\r\n File \"\/home\/ubuntu\/Python3.6_project\/zyf_project\/transformers\/examples\/pytorch\/token-classification\/run_ner.py\", line 481, in main\r\n metric = load_metric(path='mymetric\/seqeval\/seqeval.py')\r\n File \"\/home\/ubuntu\/Python3.6_project\/zyf_project\/transformers_venv_0209\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 610, in load_metric\r\n dataset=False,\r\n File \"\/home\/ubuntu\/Python3.6_project\/zyf_project\/transformers_venv_0209\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 450, in prepare_module\r\n f\"To be able to use this {module_type}, you need to install the following dependencies\"\r\nImportError: To be able to use this metric, you need to install the following dependencies['seqeval'] using 'pip install seqeval' for instance'\r\n\r\n\r\n**What should I do? 
Please help me, thank you**\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3703\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3702","id":1130666707,"node_id":"PR_kwDODunzps4yahKc","number":3702,"title":"Update the address to use https","user":{"login":"yazdanbakhsh","id":7105134,"node_id":"MDQ6VXNlcjcxMDUxMzQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7105134?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yazdanbakhsh","html_url":"https:\/\/github.com\/yazdanbakhsh","followers_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/followers","following_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/orgs","repos_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/repos","events_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yazdanbakhsh\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T18:46:30Z","updated_at":"2022-02-10T18:46:30Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3702","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3702","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3702.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3702.patch","merged_at":null},"body":"The http address doesn't work anymore","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3702\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3701","id":1130498738,"node_id":"PR_kwDODunzps4yZ8Dw","number":3701,"title":"Pin 
ElasticSearch","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T17:15:26Z","updated_at":"2022-02-10T17:31:13Z","closed_at":"2022-02-10T17:31:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3701","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3701","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3701.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3701.patch","merged_at":"2022-02-10T17:31:12Z"},"body":"Until we manage to support ES 8.0, I'm setting the version to `<8.0.0`\r\n\r\nCurrently we're getting this error on 8.0:\r\n```python\r\nValueError: Either 'hosts' or 'cloud_id' must be specified\r\n```\r\nWhen instantiating a `Elasticsearch()` object","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3701\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3700","id":1130252496,"node_id":"I_kwDODunzps5DXkjQ","number":3700,"title":"Unable to load a 
dataset","user":{"login":"PaulchauvinAI","id":97964230,"node_id":"U_kgDOBdbQxg","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/97964230?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulchauvinAI","html_url":"https:\/\/github.com\/PaulchauvinAI","followers_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/followers","following_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/repos","events_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulchauvinAI\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-10T15:05:53Z","updated_at":"2022-02-11T22:56:39Z","closed_at":"2022-02-11T22:56:39Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nUnable to load a dataset from Huggingface that I have just saved.\r\n\r\n\r\n## Steps to reproduce the bug\r\nOn Google colab\r\n`! pip install datasets `\r\n`from datasets import load_dataset`\r\n`my_path = \"wiki_dataset\"`\r\n`dataset = load_dataset('wikipedia', \"20200501.fr\")`\r\n`dataset.save_to_disk(my_path)`\r\n`dataset = load_dataset(my_path)`\r\n\r\n\r\n## Expected results\r\nLoading the dataset\r\n\r\n## Actual results\r\nValueError: Couldn't cast\r\n_data_files: list>\r\n child 0, item: struct\r\n child 0, filename: string\r\n_fingerprint: string\r\n_format_columns: null\r\n_format_kwargs: struct<>\r\n_format_type: null\r\n_indexes: struct<>\r\n_output_all_columns: bool\r\n_split: string\r\nto\r\n{'builder_name': Value(dtype='string', id=None), 'citation': Value(dtype='string', id=None), 'config_name': Value(dtype='string', id=None), 'dataset_size': Value(dtype='int64', id=None), 'description': Value(dtype='string', id=None), 'download_checksums': {}, 'download_size': Value(dtype='int64', id=None), 'features': {'title': {'dtype': Value(dtype='string', id=None), 'id': Value(dtype='null', id=None), '_type': Value(dtype='string', id=None)}, 'text': {'dtype': Value(dtype='string', id=None), 'id': Value(dtype='null', id=None), '_type': Value(dtype='string', id=None)}}, 'homepage': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'post_processed': Value(dtype='null', id=None), 'post_processing_size': Value(dtype='null', id=None), 'size_in_bytes': Value(dtype='int64', id=None), 'splits': {'train': {'name': Value(dtype='string', id=None), 'num_bytes': Value(dtype='int64', id=None), 'num_examples': Value(dtype='int64', id=None), 'dataset_name': Value(dtype='string', id=None)}}, 'supervised_keys': Value(dtype='null', id=None), 'task_templates': Value(dtype='null', id=None), 'version': {'version_str': Value(dtype='string', id=None), 'description': Value(dtype='string', id=None), 'major': Value(dtype='int64', 
id=None), 'minor': Value(dtype='int64', id=None), 'patch': Value(dtype='int64', id=None)}}\r\nbecause column names don't match\r\n\r\n## Environment info\r\n- `datasets` version: 1.18.3\r\n- Platform: Linux-5.4.144+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 6.0.1\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3700\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3699","id":1130200593,"node_id":"PR_kwDODunzps4yY49I","number":3699,"title":"Add dev-only config to Natural Questions dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-10T14:42:24Z","updated_at":"2022-02-11T09:50:22Z","closed_at":"2022-02-11T09:50:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3699","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3699","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3699.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3699.patch","merged_at":"2022-02-11T09:50:21Z"},"body":"As suggested by @lhoestq and @thomwolf, a new config has been added to Natural Questions dataset, so that only dev split can be downloaded. 
\r\n\r\nFix #413.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3699\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3698","id":1129864282,"node_id":"PR_kwDODunzps4yXtyQ","number":3698,"title":"Add finetune-data CodeFill ","user":{"login":"rgismondi","id":49989029,"node_id":"MDQ6VXNlcjQ5OTg5MDI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49989029?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rgismondi","html_url":"https:\/\/github.com\/rgismondi","followers_url":"https:\/\/api.github.com\/users\/rgismondi\/followers","following_url":"https:\/\/api.github.com\/users\/rgismondi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rgismondi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rgismondi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rgismondi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rgismondi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rgismondi\/repos","events_url":"https:\/\/api.github.com\/users\/rgismondi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rgismondi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T11:12:51Z","updated_at":"2022-02-10T11:12:51Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3698","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3698","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3698.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3698.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3698\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3697","id":1129795724,"node_id":"PR_kwDODunzps4yXeXo","number":3697,"title":"Add code-fill datasets for 
pretraining\/finetuning\/evaluating","user":{"login":"rgismondi","id":49989029,"node_id":"MDQ6VXNlcjQ5OTg5MDI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49989029?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rgismondi","html_url":"https:\/\/github.com\/rgismondi","followers_url":"https:\/\/api.github.com\/users\/rgismondi\/followers","following_url":"https:\/\/api.github.com\/users\/rgismondi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rgismondi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rgismondi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rgismondi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rgismondi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rgismondi\/repos","events_url":"https:\/\/api.github.com\/users\/rgismondi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rgismondi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T10:31:48Z","updated_at":"2022-02-10T11:00:44Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3697","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3697","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3697.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3697.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3697\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3696","id":1129764534,"node_id":"PR_kwDODunzps4yXXgH","number":3696,"title":"Force unique keys in newsqa 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T10:09:19Z","updated_at":"2022-02-10T10:09:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3696","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3696","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3696.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3696.patch","merged_at":null},"body":"Currently, it may raise `DuplicatedKeysError`.\r\n\r\nFix #3630.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3696\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3695","id":1129730148,"node_id":"PR_kwDODunzps4yXP44","number":3695,"title":"Fix ClassLabel to\/from dict when passed 
names_file","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-10T09:47:10Z","updated_at":"2022-02-11T23:02:32Z","closed_at":"2022-02-11T23:02:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3695","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3695","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3695.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3695.patch","merged_at":"2022-02-11T23:02:31Z"},"body":"Currently, `names_file` is a field of the data class `ClassLabel`, thus appearing when transforming it to dict (when saving infos). 
Afterwards, when trying to read it from infos, it conflicts with the other field `names`.\r\n\r\nThis PR, removes `names_file` as a field of the data class `ClassLabel`.\r\n- it is only used at instantiation to generate the `labels` field\r\n\r\nFix #3631.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3695\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3693","id":1128554365,"node_id":"PR_kwDODunzps4yTTcQ","number":3693,"title":"Standardize to `Example::`","user":{"login":"mishig25","id":11827707,"node_id":"MDQ6VXNlcjExODI3NzA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11827707?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mishig25","html_url":"https:\/\/github.com\/mishig25","followers_url":"https:\/\/api.github.com\/users\/mishig25\/followers","following_url":"https:\/\/api.github.com\/users\/mishig25\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mishig25\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mishig25\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mishig25\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mishig25\/orgs","repos_url":"https:\/\/api.github.com\/users\/mishig25\/repos","events_url":"https:\/\/api.github.com\/users\/mishig25\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mishig25\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-09T13:37:13Z","updated_at":"2022-02-09T13:37:13Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3693","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3693","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3693.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3693.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3693\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3692","id":1128320004,"node_id":"PR_kwDODunzps4yShiu","number":3692,"title":"Update data URL in pubmed dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-09T10:06:21Z","updated_at":"2022-02-10T14:58:00Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3692","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3692","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3692.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3692.patch","merged_at":null},"body":"Fix #3655.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3692\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3691","id":1127629306,"node_id":"PR_kwDODunzps4yQThV","number":3691,"title":"Upgrade black to version 
~=22.0","user":{"login":"LysandreJik","id":30755778,"node_id":"MDQ6VXNlcjMwNzU1Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30755778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LysandreJik","html_url":"https:\/\/github.com\/LysandreJik","followers_url":"https:\/\/api.github.com\/users\/LysandreJik\/followers","following_url":"https:\/\/api.github.com\/users\/LysandreJik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LysandreJik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LysandreJik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LysandreJik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LysandreJik\/orgs","repos_url":"https:\/\/api.github.com\/users\/LysandreJik\/repos","events_url":"https:\/\/api.github.com\/users\/LysandreJik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LysandreJik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-08T18:45:19Z","updated_at":"2022-02-08T19:56:40Z","closed_at":"2022-02-08T19:56:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3691","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3691","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3691.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3691.patch","merged_at":"2022-02-08T19:56:39Z"},"body":"Upgrades the `datasets` library quality tool `black` to use the first stable release of `black`, version 22.0.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3691\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3690","id":1127493538,"node_id":"PR_kwDODunzps4yP2p5","number":3690,"title":"WIP: update docs to new 
frontend\/UI","user":{"login":"mishig25","id":11827707,"node_id":"MDQ6VXNlcjExODI3NzA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11827707?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mishig25","html_url":"https:\/\/github.com\/mishig25","followers_url":"https:\/\/api.github.com\/users\/mishig25\/followers","following_url":"https:\/\/api.github.com\/users\/mishig25\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mishig25\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mishig25\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mishig25\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mishig25\/orgs","repos_url":"https:\/\/api.github.com\/users\/mishig25\/repos","events_url":"https:\/\/api.github.com\/users\/mishig25\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mishig25\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-08T16:38:09Z","updated_at":"2022-02-11T16:22:10Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3690","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3690","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3690.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3690.patch","merged_at":null},"body":"### TLDR: Update `datasets` `docs` to the new syntax & frontend (as how it looks on [hf.co\/transformers](https:\/\/huggingface.co\/docs\/transformers\/index))\r\n\r\n## Checklist\r\n\r\n- [ ] update datasets docs to new syntax (should call `doc-builder convert`) (this PR)\r\n- [x] discuss `@property` methods frontend https:\/\/github.com\/huggingface\/doc-builder\/pull\/87\r\n- [x] discuss `inject_arrow_table_documentation` (this PR) https:\/\/github.com\/huggingface\/datasets\/pull\/3690#discussion_r801847860\r\n- [x] update datasets docs path on moon-landing https:\/\/github.com\/huggingface\/moon-landing\/pull\/2089\r\n- [ ] update nginx `docs\/datasets` to route to moon-landing (do similar to internal repo # 81)\r\n- [x] convert pyarrow docstring from Numpydoc style to groups style https:\/\/github.com\/huggingface\/doc-builder\/pull\/89(https:\/\/stackoverflow.com\/a\/24385103\/6558628)\r\n- [x] handle `Raises` section on frontend and doc-builder https:\/\/github.com\/huggingface\/doc-builder\/pull\/86\r\n- [x] check imgs path (this PR) (nothing to update here)\r\n- [ ] delete sphinx related files (this PR)\r\n- [ ] update github actions (doc quality check & PR doc)\r\n- [x] doc exaples block has to follow format `Examples::` https:\/\/github.com\/huggingface\/datasets\/pull\/3693\r\n- [x] add `versions.yml` in doc-build https:\/\/github.com\/huggingface\/doc-build\/pull\/1\r\n- [ ] add `versions.yml` in doc-build-dev ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3690\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3689","id":1127422478,"node_id":"PR_kwDODunzps4yPnp7","number":3689,"title":"Fix streaming for servers not supporting HTTP range requests","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2022-02-08T15:41:05Z","updated_at":"2022-02-10T16:51:25Z","closed_at":"2022-02-10T16:51:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3689","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3689","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3689.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3689.patch","merged_at":"2022-02-10T16:51:24Z"},"body":"Some servers do not support HTTP range requests, whereas this is required to stream some file formats (like ZIP).\r\n\r\n~~This PR implements a workaround for those cases, by download the files locally in a temporary directory (cleaned up by the OS once the process is finished).~~\r\n\r\nThis PR raises custom error explaining that streaming is not possible because data host server does not support HTTP range requests.\r\n\r\nFix #3677.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3689\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3688","id":1127218321,"node_id":"I_kwDODunzps5DL_yR","number":3688,"title":"Pyarrow version error","user":{"login":"Zaker237","id":49993443,"node_id":"MDQ6VXNlcjQ5OTkzNDQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49993443?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Zaker237","html_url":"https:\/\/github.com\/Zaker237","followers_url":"https:\/\/api.github.com\/users\/Zaker237\/followers","following_url":"https:\/\/api.github.com\/users\/Zaker237\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Zaker237\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Zaker237\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Zaker237\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Zaker237\/orgs","repos_url":"https:\/\/api.github.com\/users\/Zaker237\/repos","events_url":"https:\/\/api.github.com\/users\/Zaker237\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Zaker237\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2022-02-08T12:53:59Z","updated_at":"2022-02-09T06:35:33Z","closed_at":"2022-02-09T06:35:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI installed datasets(version 1.17.0, 1.18.0, 1.18.3) but i'm right now nor able to import it because of pyarrow. 
when i try to import it, i get the following error:\r\n`To use datasets, the module pyarrow>=3.0.0 is required, and the current version of pyarrow doesn't match this condition`.\r\ni tryed with all version of pyarrow execpt `4.0.0` but still get the same error.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nAttributeError Traceback (most recent call last)\r\n in \r\n----> 1 import datasets\r\n\r\n~\\AppData\\Local\\Continuum\\anaconda3\\lib\\site-packages\\datasets\\__init__.py in \r\n 26 \r\n 27 \r\n---> 28 if _version.parse(pyarrow.__version__).major < 3:\r\n 29 raise ImportWarning(\r\n 30 \"To use `datasets`, the module `pyarrow>=3.0.0` is required, and the current version of `pyarrow` doesn't match this condition.\\n\"\r\n\r\nAttributeError: 'Version' object has no attribute 'major'\r\n\r\n## Environment info\r\nTraceback (most recent call last):\r\n File \"c:\\users\\alex\\appdata\\local\\continuum\\anaconda3\\lib\\runpy.py\", line 193, in _run_module_as_main\r\n \"__main__\", mod_spec)\r\n File \"c:\\users\\alex\\appdata\\local\\continuum\\anaconda3\\lib\\runpy.py\", line 85, in _run_code\r\n exec(code, run_globals)\r\n File \"C:\\Users\\Alex\\AppData\\Local\\Continuum\\anaconda3\\Scripts\\datasets-cli.exe\\__main__.py\", line 5, in \r\n File \"c:\\users\\alex\\appdata\\local\\continuum\\anaconda3\\lib\\site-packages\\datasets\\__init__.py\", line 28, in \r\n if _version.parse(pyarrow.__version__).major < 3:\r\nAttributeError: 'Version' object has no attribute 'major'\r\n\r\n- `datasets` version:\r\n- Platform: Linux(Ubuntu) and Windows: conda on the both\r\n- Python version: 3.7\r\n- PyArrow version: 7.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3688\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3687","id":1127154766,"node_id":"I_kwDODunzps5DLwRO","number":3687,"title":"Can't get the text data when calling 
to_tf_dataset","user":{"login":"phrasenmaeher","id":82086367,"node_id":"MDQ6VXNlcjgyMDg2MzY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/82086367?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/phrasenmaeher","html_url":"https:\/\/github.com\/phrasenmaeher","followers_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/followers","following_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/orgs","repos_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/repos","events_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/phrasenmaeher\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"assignees":[{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2022-02-08T11:52:10Z","updated_at":"2022-02-08T16:54:55Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am working with the SST2 dataset, and am using TensorFlow 2.5\r\nI'd like to convert it to a `tf.data.Dataset` by calling the `to_tf_dataset` method.\r\nThe 
following snippet is what I am using to achieve this:\r\n\r\n```\r\nfrom datasets import load_dataset\r\nfrom transformers import DefaultDataCollator\r\n\r\ndata_collator = DefaultDataCollator(return_tensors=\"tf\")\r\ndataset = load_dataset(\"sst\")\r\n\r\ntrain_dataset = dataset[\"train\"].to_tf_dataset(columns=['sentence'], label_cols=\"label\", shuffle=True, batch_size=8,collate_fn=data_collator)\r\n```\r\nHowever, this only gets me the labels; the text--the most important part--is missing:\r\n\r\n```\r\nfor s in train_dataset.take(1):\r\n print(s) #prints something like: ({}, )\r\n```\r\n\r\nAs you can see, it only returns the label part, not the data, as indicated by the empty dictionary, `{}`. So far, I've played with various settings of the method arguments, but to no avail; I do not want to perform any text processing at this time. On my quest to achieve what I want ( a `tf.data.Dataset`), I've consulted these resources:\r\n\r\n[https:\/\/www.philschmid.de\/huggingface-transformers-keras-tf](https:\/\/www.philschmid.de\/huggingface-transformers-keras-tf)\r\n[https:\/\/huggingface.co\/docs\/datasets\/use_dataset.html?highlight=tensorflow](https:\/\/huggingface.co\/docs\/datasets\/use_dataset.html?highlight=tensorflow)\r\n\r\nI was surprised to not find more extensive examples on how to transform a Hugginface dataset to one compatible with TensorFlow.\r\n\r\nIf you could point me to where I am going wrong, please do so.\r\nThanks in advance for your support.\r\n\r\n---\r\nEdit: In the [docs](https:\/\/huggingface.co\/docs\/datasets\/package_reference\/main_classes.html#datasets.Dataset.to_tf_dataset), I found the following description:\r\n\r\n\r\n_In general, only columns that the model can use as input should be included here (numeric data only)._\r\n\r\nDoes this imply that no textual, i.e., `string` data can be loaded?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3687\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3686","id":1127137290,"node_id":"I_kwDODunzps5DLsAK","number":3686,"title":"`Translation` features cannot be 
`flatten`ed","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-02-08T11:33:48Z","updated_at":"2022-02-08T13:52:34Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the 
bug\r\n\r\n(`Dataset.flatten`)[https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/arrow_dataset.py#L1265] fails for columns with feature (`Translation`)[https:\/\/github.com\/huggingface\/datasets\/blob\/3edbeb0ec6519b79f1119adc251a1a6b379a2c12\/src\/datasets\/features\/translation.py#L8]\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"europa_ecdc_tm\", \"en2fr\", split=\"train[:10]\")\r\nprint(dataset.features)\r\n# {'translation': Translation(languages=['en', 'fr'], id=None)}\r\nprint(dataset[0])\r\n# {'translation': {'en': 'Vaccination against hepatitis C is not yet available.', 'fr': 'Aucune vaccination contre l\u2019h\u00e9patite C n\u2019est encore disponible.'}}\r\n\r\ndataset.flatten()\r\n```\r\n\r\n## Expected results\r\n\r\n`dataset.flatten` should flatten the `Translation` column as if it were a dict of `Value(\"string\")`\r\n\r\n```python\r\ndataset[0]\r\n# {'translation.en': 'Vaccination against hepatitis C is not yet available.', 'translation.fr': 'Aucune vaccination contre l\u2019h\u00e9patite C n\u2019est encore disponible.' }\r\ndataset.features\r\n# {'translation.en': Value(\"string\"), 'translation.fr': Value(\"string\")}\r\n```\r\n\r\n## Actual results\r\n\r\n```python\r\nIn [31]: dset.flatten()\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n in \r\n----> 1 dset.flatten()\r\n\r\n[...]\\site-packages\\datasets\\fingerprint.py in wrapper(*args, **kwargs)\r\n 411 # Call actual function\r\n 412\r\n--> 413 out = func(self, *args, **kwargs)\r\n 414\r\n 415 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n[...]\\site-packages\\datasets\\arrow_dataset.py in flatten(self, new_fingerprint, max_depth)\r\n 1294 break\r\n 1295 dataset.info.features = self.features.flatten(max_depth=max_depth)\r\n-> 1296 dataset._data = update_metadata_with_features(dataset._data, dataset.features)\r\n 1297 logger.info(f'Flattened dataset from depth {depth} to depth {1 if depth + 1 < max_depth else \"unknown\"}.')\r\n 1298 dataset._fingerprint = new_fingerprint\r\n\r\n[...]\\site-packages\\datasets\\arrow_dataset.py in update_metadata_with_features(table, features)\r\n 534 def update_metadata_with_features(table: Table, features: Features):\r\n 535 \"\"\"To be used in dataset transforms that modify the features of the dataset, in order to update the features stored in the metadata of its schema.\"\"\"\r\n--> 536 features = Features({col_name: features[col_name] for col_name in table.column_names})\r\n 537 if table.schema.metadata is None or b\"huggingface\" not in table.schema.metadata:\r\n 538 pa_metadata = ArrowWriter._build_metadata(DatasetInfo(features=features))\r\n\r\n[...]\\site-packages\\datasets\\arrow_dataset.py in (.0)\r\n 534 def update_metadata_with_features(table: Table, features: Features):\r\n 535 \"\"\"To be used in dataset transforms that modify the features of the dataset, in order to update the features stored in the metadata of its schema.\"\"\"\r\n--> 536 features = Features({col_name: features[col_name] for col_name in table.column_names})\r\n 537 if table.schema.metadata is None or b\"huggingface\" not in table.schema.metadata:\r\n 538 pa_metadata = ArrowWriter._build_metadata(DatasetInfo(features=features))\r\n\r\nKeyError: 'translation.en'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.3\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- 
Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3686\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3685","id":1126240444,"node_id":"PR_kwDODunzps4yLw3m","number":3685,"title":"Add support for `Audio` and `Image` feature in `push_to_hub`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-07T16:47:16Z","updated_at":"2022-02-11T19:40:00Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3685","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3685","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3685.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3685.patch","merged_at":null},"body":"Add support for the `Audio` and the `Image` feature in `push_to_hub`. \r\n\r\nThe idea is to remove local path information and store file content under \"bytes\" in the Arrow table before the push.\r\n\r\nMy initial approach (https:\/\/github.com\/huggingface\/datasets\/commit\/34c652afeff9686b6b8bf4e703c84d2205d670aa) was to use a map transform similar to [`decode_nested_example`](https:\/\/github.com\/huggingface\/datasets\/blob\/5e0f6068741464f833ff1802e24ecc2064aaea9f\/src\/datasets\/features\/features.py#L1023-L1056) while having decoding turned off, but I wasn't satisfied with the code quality, so I ended up using the `temporary_assignment` decorator to override `cast_storage`, which allows me to directly modify the underlying storage (the final op is similar to `Dataset.cast`) and results in a much simpler code. 
\r\n\r\nAdditionally, I added the `allow_cast` flag that can disable this behavior in the situations where it's not needed (e.g. the dataset is already in the correct format for the Hub, etc.)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3685\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3684","id":1125133664,"node_id":"PR_kwDODunzps4yIOer","number":3684,"title":"[fix]: iwslt2017 download urls","user":{"login":"msarmi9","id":48395294,"node_id":"MDQ6VXNlcjQ4Mzk1Mjk0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48395294?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/msarmi9","html_url":"https:\/\/github.com\/msarmi9","followers_url":"https:\/\/api.github.com\/users\/msarmi9\/followers","following_url":"https:\/\/api.github.com\/users\/msarmi9\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/msarmi9\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/msarmi9\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/msarmi9\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/msarmi9\/orgs","repos_url":"https:\/\/api.github.com\/users\/msarmi9\/repos","events_url":"https:\/\/api.github.com\/users\/msarmi9\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/msarmi9\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-02-06T07:56:55Z","updated_at":"2022-02-09T08:39:31Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3684","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3684","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3684.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3684.patch","merged_at":null},"body":"Fixes #2076.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3684\/timeline","performed_via_github_app":null} 
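The approach described in #3685 above amounts to replacing local path references with the raw file content before pushing, so the Arrow table is self-contained. Below is a rough sketch of that idea using a plain `map` call; the "image" column name and the `{"path": ..., "bytes": ...}` layout are assumptions for illustration, not the PR's actual implementation, which overrides `cast_storage` instead.

```python
# Rough sketch of the bytes-embedding idea from #3685 (illustrative, not the PR's code).
def embed_file_bytes(example):
    # Assumed layout: an "image" column holding {"path": str, "bytes": bytes or None}.
    path = example["image"]["path"]
    with open(path, "rb") as f:
        example["image"] = {"path": None, "bytes": f.read()}
    return example

# dataset = dataset.map(embed_file_bytes)      # embed content before the push
# dataset.push_to_hub("username/my_dataset")   # placeholder repository id
```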
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3683","id":1124458371,"node_id":"PR_kwDODunzps4yGKoj","number":3683,"title":"added told-br (brazilian hate speech) dataset","user":{"login":"JAugusto97","id":26556320,"node_id":"MDQ6VXNlcjI2NTU2MzIw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26556320?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JAugusto97","html_url":"https:\/\/github.com\/JAugusto97","followers_url":"https:\/\/api.github.com\/users\/JAugusto97\/followers","following_url":"https:\/\/api.github.com\/users\/JAugusto97\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JAugusto97\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JAugusto97\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JAugusto97\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JAugusto97\/orgs","repos_url":"https:\/\/api.github.com\/users\/JAugusto97\/repos","events_url":"https:\/\/api.github.com\/users\/JAugusto97\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JAugusto97\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-04T17:44:32Z","updated_at":"2022-02-07T21:14:52Z","closed_at":"2022-02-07T21:14:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3683","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3683","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3683.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3683.patch","merged_at":"2022-02-07T21:14:52Z"},"body":"Hey,\r\n\r\nAdding ToLD-Br. Feel free to ask for modifications. 
\r\n\r\nThanks!!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3683\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3682","id":1124434330,"node_id":"PR_kwDODunzps4yGFml","number":3682,"title":"adding told-br for toxic\/abusive hatespeech detection","user":{"login":"JAugusto97","id":26556320,"node_id":"MDQ6VXNlcjI2NTU2MzIw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26556320?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JAugusto97","html_url":"https:\/\/github.com\/JAugusto97","followers_url":"https:\/\/api.github.com\/users\/JAugusto97\/followers","following_url":"https:\/\/api.github.com\/users\/JAugusto97\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JAugusto97\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JAugusto97\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JAugusto97\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JAugusto97\/orgs","repos_url":"https:\/\/api.github.com\/users\/JAugusto97\/repos","events_url":"https:\/\/api.github.com\/users\/JAugusto97\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JAugusto97\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-04T17:18:29Z","updated_at":"2022-02-07T03:23:24Z","closed_at":"2022-02-04T17:36:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3682","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3682","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3682.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3682.patch","merged_at":null},"body":"Hey, \r\n\r\nI'm adding our dataset from our paper published at AACL 2020. 
Feel free to ask for modifications.\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3682\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3681","id":1124237458,"node_id":"PR_kwDODunzps4yFcpM","number":3681,"title":"Fix TestCommand to move dataset_infos instead of copying","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2022-02-04T14:01:52Z","updated_at":"2022-02-04T18:47:16Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3681","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3681","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3681.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3681.patch","merged_at":null},"body":"Why do we copy instead of moving the file?\r\n\r\nCC: @lhoestq @lvwerra ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3681\/timeline","performed_via_github_app":null} 
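#3681 above asks why `dataset_infos.json` is copied rather than moved. In plain Python the difference is simply `shutil.copyfile` versus `shutil.move`, as in this illustrative sketch with placeholder paths:

```python
# Illustrative only: copying leaves the file in both locations, moving relocates it.
import shutil

src = "cache_dir/dataset_infos.json"          # placeholder source path
dst = "local_dataset_dir/dataset_infos.json"  # placeholder destination path

# shutil.copyfile(src, dst)  # current behaviour questioned in #3681
shutil.move(src, dst)        # behaviour proposed by the PR title
```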
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3680","id":1124213416,"node_id":"PR_kwDODunzps4yFXm8","number":3680,"title":"Fix TestCommand to copy dataset_infos to local dir with only data files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-04T13:36:46Z","updated_at":"2022-02-08T10:32:55Z","closed_at":"2022-02-08T10:32:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3680","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3680","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3680.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3680.patch","merged_at":"2022-02-08T10:32:55Z"},"body":"Currently this case is missed.\r\n\r\nCC: @lvwerra ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3680\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3679","id":1124062133,"node_id":"I_kwDODunzps5C_9O1","number":3679,"title":"Download datasets from a private 
hub","user":{"login":"juliensimon","id":3436143,"node_id":"MDQ6VXNlcjM0MzYxNDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3436143?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/juliensimon","html_url":"https:\/\/github.com\/juliensimon","followers_url":"https:\/\/api.github.com\/users\/juliensimon\/followers","following_url":"https:\/\/api.github.com\/users\/juliensimon\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/juliensimon\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/juliensimon\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/juliensimon\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/juliensimon\/orgs","repos_url":"https:\/\/api.github.com\/users\/juliensimon\/repos","events_url":"https:\/\/api.github.com\/users\/juliensimon\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/juliensimon\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3814924348,"node_id":"LA_kwDODunzps7jYyA8","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/private-hub","name":"private-hub","color":"A929D8","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-04T10:49:06Z","updated_at":"2022-02-09T15:04:25Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In the context of a private hub deployment, customers would like to use load_dataset() to load datasets from their hub, not from the public hub. This doesn't seem to be configurable at the moment and it would be nice to add this feature.\r\n\r\nThe obvious workaround is to clone the repo first and then load it from local storage, but this adds an extra step. It'd be great to have the same experience regardless of where the hub is hosted.\r\n\r\nThe same issue exists with the transformers library and the CLI. 
I'm going to create issues there as well, and I'll reference them below.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3679\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3678","id":1123402426,"node_id":"PR_kwDODunzps4yCt91","number":3678,"title":"Add code example in wikipedia card","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-03T18:09:02Z","updated_at":"2022-02-04T13:21:39Z","closed_at":"2022-02-04T13:21:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3678","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3678","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3678.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3678.patch","merged_at":"2022-02-04T13:21:39Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3678\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3677","id":1123192866,"node_id":"I_kwDODunzps5C8pAi","number":3677,"title":"Discovery cannot be streamed 
anymore","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-02-03T15:02:03Z","updated_at":"2022-02-10T16:51:24Z","closed_at":"2022-02-10T16:51:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null
,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\niterable_dataset = load_dataset(\"discovery\", name=\"discovery\", split=\"train\", streaming=True)\r\nlist(iterable_dataset.take(1))\r\n```\r\n\r\n## Expected results\r\n\r\nThe first row of the train split.\r\n\r\n## Actual results\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/iterable_dataset.py\", line 365, in __iter__\r\n for key, example in self._iter():\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/iterable_dataset.py\", line 362, in _iter\r\n yield from ex_iterable\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/iterable_dataset.py\", line 272, in __iter__\r\n yield from islice(self.ex_iterable, self.n)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/iterable_dataset.py\", line 79, in __iter__\r\n yield from self.generate_examples_fn(**self.kwargs)\r\n File \"\/home\/slesage\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/discovery\/542fab7a9ddc1d9726160355f7baa06a1ccc44c40bc8e12c09e9bc743aca43a2\/discovery.py\", line 333, in _generate_examples\r\n with open(data_file, encoding=\"utf8\") as f:\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/streaming.py\", line 64, in wrapper\r\n return function(*args, use_auth_token=use_auth_token, **kwargs)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/utils\/streaming_download_manager.py\", line 369, in xopen\r\n file_obj = fsspec.open(file, mode=mode, *args, **kwargs).open()\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/core.py\", line 456, in open\r\n return open_files(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/core.py\", line 288, in open_files\r\n fs, fs_token, paths = get_fs_token_paths(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/core.py\", line 611, in get_fs_token_paths\r\n fs = filesystem(protocol, **inkwargs)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/registry.py\", line 253, in filesystem\r\n return cls(**storage_options)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/spec.py\", line 68, in __call__\r\n obj = super().__call__(*args, **kwargs)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/implementations\/zip.py\", line 57, in __init__\r\n self.zip = zipfile.ZipFile(self.fo)\r\n File \"\/home\/slesage\/.pyenv\/versions\/3.9.6\/lib\/python3.9\/zipfile.py\", line 1257, in __init__\r\n self._RealGetContents()\r\n File \"\/home\/slesage\/.pyenv\/versions\/3.9.6\/lib\/python3.9\/zipfile.py\", line 1320, in _RealGetContents\r\n endrec = _EndRecData(fp)\r\n File \"\/home\/slesage\/.pyenv\/versions\/3.9.6\/lib\/python3.9\/zipfile.py\", line 263, in _EndRecData\r\n fpin.seek(0, 2)\r\n File 
\"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/fsspec\/implementations\/http.py\", line 676, in seek\r\n raise ValueError(\"Cannot seek streaming HTTP file\")\r\nValueError: Cannot seek streaming HTTP file\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.3\r\n- Platform: Linux-5.11.0-1027-aws-x86_64-with-glibc2.31\r\n- Python version: 3.9.6\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3677\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3676","id":1123096362,"node_id":"I_kwDODunzps5C8Rcq","number":3676,"title":"`None` replaced by `[]` after first batch in map","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.g
ithub.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-02-03T13:36:48Z","updated_at":"2022-02-03T16:30:52Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Sometimes `None` can be replaced by `[]` when running map:\r\n\r\n```python\r\nfrom datasets import Dataset\r\n\r\nds = Dataset.from_dict({\"a\": range(4)})\r\nds = ds.map(lambda x: {\"b\": [[None, [0]]]}, batched=True, batch_size=1, remove_columns=[\"a\"])\r\nprint(ds.to_pandas())\r\n# b\r\n# 0 [None, [0]]\r\n# 1 [[], [0]]\r\n# 2 [[], [0]]\r\n# 3 [[], [0]]\r\n```\r\n\r\nThis issue has been experienced when running the `run_qa.py` example from `transformers` (see issue https:\/\/github.com\/huggingface\/transformers\/issues\/15401)\r\n\r\nThis can be due to a bug in when casting `None` in nested lists. Casting only happens after the first batch, since the first batch is used to infer the feature types.\r\n\r\ncc @sgugger ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3676\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3675","id":1123078408,"node_id":"I_kwDODunzps5C8NEI","number":3675,"title":"Add CodeContests 
dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-02-03T13:20:00Z","updated_at":"2022-02-10T20:50:38Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** CodeContests\r\n- **Description:** CodeContests is a competitive programming dataset for machine-learning.\r\n- **Paper:**\r\n- **Data:** https:\/\/github.com\/deepmind\/code_contests\r\n- **Motivation:** This dataset was used when training [AlphaCode](https:\/\/deepmind.com\/blog\/article\/Competitive-programming-with-AlphaCode).\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3675\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3674","id":1123027874,"node_id":"PR_kwDODunzps4yBe17","number":3674,"title":"Add FrugalScore 
metric","user":{"login":"moussaKam","id":28675016,"node_id":"MDQ6VXNlcjI4Njc1MDE2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28675016?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/moussaKam","html_url":"https:\/\/github.com\/moussaKam","followers_url":"https:\/\/api.github.com\/users\/moussaKam\/followers","following_url":"https:\/\/api.github.com\/users\/moussaKam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/moussaKam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/moussaKam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/moussaKam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/moussaKam\/orgs","repos_url":"https:\/\/api.github.com\/users\/moussaKam\/repos","events_url":"https:\/\/api.github.com\/users\/moussaKam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/moussaKam\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-03T12:28:52Z","updated_at":"2022-02-08T15:28:56Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3674","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3674","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3674.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3674.patch","merged_at":null},"body":"This pull request add FrugalScore metric for NLG systems evaluation.\r\n\r\nFrugalScore is a reference-based metric for NLG models evaluation. It is based on a distillation approach that allows to learn a fixed, low cost version of any expensive NLG metric, while retaining most of its original performance.\r\n\r\nPaper: https:\/\/arxiv.org\/abs\/2110.08559?context=cs\r\nGithub: https:\/\/github.com\/moussaKam\/FrugalScore\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3674\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3673","id":1123010520,"node_id":"I_kwDODunzps5C78fY","number":3673,"title":"`load_dataset(\"snli\")` is different from dataset 
viewer","user":{"login":"pietrolesci","id":61748653,"node_id":"MDQ6VXNlcjYxNzQ4NjUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/61748653?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pietrolesci","html_url":"https:\/\/github.com\/pietrolesci","followers_url":"https:\/\/api.github.com\/users\/pietrolesci\/followers","following_url":"https:\/\/api.github.com\/users\/pietrolesci\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pietrolesci\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pietrolesci\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pietrolesci\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pietrolesci\/orgs","repos_url":"https:\/\/api.github.com\/users\/pietrolesci\/repos","events_url":"https:\/\/api.github.com\/users\/pietrolesci\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pietrolesci\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"assignees":[{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":9,"created_at":"2022-02-03T12:10:43Z","updated_at":"2022-02-11T17:01:21Z","clos
ed_at":"2022-02-11T17:01:21Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe dataset that is downloaded from the Hub via `load_dataset(\"snli\")` is different from what is available in the dataset viewer. In the viewer the labels are not encoded (i.e., \"neutral\", \"entailment\", \"contradiction\"), while the downloaded dataset shows the encoded labels (i.e., 0, 1, 2).\r\n\r\nIs this expected? \r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform: Ubuntu 20.4\r\n- Python version: 3.7\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3673\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3672","id":1122980556,"node_id":"PR_kwDODunzps4yBUrZ","number":3672,"title":"Prioritize `module.builder_kwargs` over defaults in `TestCommand`","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-03T11:38:42Z","updated_at":"2022-02-04T12:37:20Z","closed_at":"2022-02-04T12:37:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3672","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3672","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3672.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3672.patch","merged_at":"2022-02-04T12:37:19Z"},"body":"This fixes a bug in the `TestCommand` where multiple kwargs for `name` were passed if it was set in both default and `module.builder_kwargs`. 
Example error:\r\n\r\n```Python\r\nTraceback (most recent call last):\r\n File \"create_metadata.py\", line 96, in \r\n main(**vars(args))\r\n File \"create_metadata.py\", line 86, in main\r\n metadata_command.run()\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/commands\/test.py\", line 144, in run\r\n for j, builder in enumerate(get_builders()):\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/commands\/test.py\", line 141, in get_builders\r\n name=name, cache_dir=self._cache_dir, data_dir=self._data_dir, **module.builder_kwargs\r\nTypeError: type object got multiple values for keyword argument 'name'\r\n```\r\n\r\nLet me know what you think.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3672\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3671","id":1122864253,"node_id":"I_kwDODunzps5C7Yx9","number":3671,"title":"Give an estimate of the dataset size in DatasetInfo","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-03T09:47:10Z","updated_at":"2022-02-03T09:47:10Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nCurrently, only part of the datasets provide `dataset_size`, `download_size`, `size_in_bytes` (and `num_bytes` and `num_examples` inside `splits`). 
I would want to get this information, or an estimation, for all the datasets.\r\n\r\n**Describe the solution you'd like**\r\n\r\n- get access to the git information for the dataset files hosted on the hub\r\n- look at the [`Content-Length`](https:\/\/developer.mozilla.org\/en-US\/docs\/Web\/HTTP\/Headers\/Content-Length) for the files served by HTTP\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3671\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3670","id":1122439827,"node_id":"PR_kwDODunzps4x_kBx","number":3670,"title":"feat: \ud83c\udfb8 generate info if dataset_infos.json does not exist","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-02T22:11:56Z","updated_at":"2022-02-11T20:24:35Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3670","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3670","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3670.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3670.patch","merged_at":null},"body":"in get_dataset_infos(). 
Also: add the `use_auth_token` parameter, and create get_dataset_config_info()\r\n\r\n\u2705 Closes: #3013","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3670\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3669","id":1122335622,"node_id":"PR_kwDODunzps4x_OTI","number":3669,"title":"Common voice validated partition","user":{"login":"shalymin-amzn","id":98762373,"node_id":"U_kgDOBeL-hQ","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/98762373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shalymin-amzn","html_url":"https:\/\/github.com\/shalymin-amzn","followers_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/followers","following_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/orgs","repos_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/repos","events_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shalymin-amzn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2022-02-02T20:04:43Z","updated_at":"2022-02-08T17:26:52Z","closed_at":"2022-02-08T17:23:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3669","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3669","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3669.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3669.patch","merged_at":"2022-02-08T17:23:12Z"},"body":"This patch adds access to the 'validated' partitions of CommonVoice datasets (provided by the dataset creators but not available in the HuggingFace interface yet).\r\nAs 'validated' contains significantly more data than 'train' (although it contains both test and validation, so one needs to be careful there), it can be useful to train better models where no strict comparison with the previous work is intended.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3669\/timeline","performed_via_github_app":null} 
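The record ending here (PR 3669) describes exposing the "validated" partition of Common Voice, while warning that it overlaps with the validation and test partitions. A minimal usage sketch under that assumption; the split name "validated" and the overlap filtering are illustrative, not confirmed by the record:

```python
from datasets import load_dataset

# Hypothetical usage once the 'validated' partition is exposed;
# the split string "validated" is assumed here for illustration.
validated = load_dataset("common_voice", "tt", split="validated")

# 'validated' also contains dev/test clips, so drop anything you plan
# to evaluate on before using it as extra training data.
test_paths = set(load_dataset("common_voice", "tt", split="test")["path"])
train_only = validated.filter(lambda ex: ex["path"] not in test_paths)
```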
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3668","id":1122261736,"node_id":"I_kwDODunzps5C5Fro","number":3668,"title":" Couldn't cast array of type string error with cast_column","user":{"login":"R4ZZ3","id":25264037,"node_id":"MDQ6VXNlcjI1MjY0MDM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25264037?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/R4ZZ3","html_url":"https:\/\/github.com\/R4ZZ3","followers_url":"https:\/\/api.github.com\/users\/R4ZZ3\/followers","following_url":"https:\/\/api.github.com\/users\/R4ZZ3\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/R4ZZ3\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/R4ZZ3\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/R4ZZ3\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/R4ZZ3\/orgs","repos_url":"https:\/\/api.github.com\/users\/R4ZZ3\/repos","events_url":"https:\/\/api.github.com\/users\/R4ZZ3\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/R4ZZ3\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-02-02T18:33:29Z","updated_at":"2022-02-09T07:07:42Z","closed_at":"2022-02-09T07:07:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\n\r\nIn OVH cloud during Huggingface Robust-speech-recognition event on a AI training notebook instance using jupyter lab and running jupyter notebook When using the dataset.cast_column(\"audio\",Audio(sampling_rate=16_000))\r\nmethod I get error\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152214027-9c42a71a-dd24-463c-a346-57e0287e5a8f.png)\r\n\r\nThis was working with datasets version 1.17.1.dev0\r\nbut now with version 1.18.3 produces the error above.\r\n\r\n## Steps to reproduce the bug\r\n\r\nload dataset:\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152216145-159553b6-cddc-4f0b-8607-7e76b600e22a.png)\r\n\r\n\r\nremove columns:\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152214707-7c7e89d1-87d8-4b4f-8cfc-5d7223d35644.png)\r\n\r\nrun my fix_path function.\r\nThis also creates the audio column that is referring to the absolute file path of the audio\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152214773-51f71ccf-d31b-4449-b63a-1af56436e49f.png)\r\n\r\nThen I concatenate few other datasets and finally try the cast_column method\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152215032-f341ec86-9d6d-48c9-943b-e2efe37a4d98.png)\r\n\r\nbut get error:\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152215073-b85bd057-98e8-413c-9b05-51e9805f2c24.png)\r\n\r\n\r\n\r\n\r\n## 
Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.3\r\n- Platform: \r\nOVH Cloud, AI Training section, container for Huggingface Robust Speech Recognition event image(baaastijn\/ovh_huggingface)\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152215161-b4ff7bfb-2736-4afb-9223-761a3338d23c.png)\r\n\r\n- Python version: 3.8.8\r\n- PyArrow version:\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/152215936-4d365760-557e-456b-b5eb-ad1d15cf5073.png)\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3668\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3667","id":1122060630,"node_id":"PR_kwDODunzps4x-Ujt","number":3667,"title":"Process .opus files with torchaudio","user":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/event
s{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"assignees":[{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2022-02-02T15:23:14Z","updated_at":"2022-02-04T15:29:38Z","closed_at":"2022-02-04T15:29:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3667","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3667","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3667.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3667.patch","merged_at":null},"body":"@anton-l suggested to proccess .opus files with `torchaudio` instead of `soundfile` as it's faster: \r\n![opus](https:\/\/user-images.githubusercontent.com\/16348744\/152177816-2df6076c-f28b-4aef-a08d-b499b921414d.png)\r\n\r\n(moreover, I didn't manage to load .opus files with `soundfile` \/ `librosa` locally on any my machine anyway for some reason, even with `ffmpeg` installed).\r\n\r\nFor now my current changes work with locally stored file:\r\n```python\r\n# download sample opus file (from MultilingualSpokenWords dataset)\r\n!wget https:\/\/huggingface.co\/datasets\/polinaeterna\/test_opus\/resolve\/main\/common_voice_tt_17737010.opus \r\n\r\nfrom datasets import Dataset, Audio\r\n\r\naudio_path = \"common_voice_tt_17737010.opus\"\r\ndataset = Dataset.from_dict({\"audio\": [audio_path]}).cast_column(\"audio\", Audio(48000))\r\ndataset[0]\r\n# {'audio': {'path': 'common_voice_tt_17737010.opus',\r\n# 'array': array([ 0.0000000e+00, 0.0000000e+00, 3.0517578e-05, ...,\r\n# -6.1035156e-05, 6.1035156e-05, 0.0000000e+00], dtype=float32),\r\n# 'sampling_rate': 48000}}\r\n```\r\nBut it doesn't work when loading inside s dataset from bytes (I checked on [MultilingualSpokenWords](https:\/\/github.com\/huggingface\/datasets\/pull\/3666), the PR is a draft now, maybe the bug is somewhere there )\r\n\r\n```python\r\nimport torchaudio\r\nwith open(audio_path, \"rb\") as b:\r\n print(torchaudio.load(b))\r\n# RuntimeError: Error loading audio file: failed to open file \r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3667\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3666","id":1122058894,"node_id":"PR_kwDODunzps4x-ULz","number":3666,"title":"Multilingual Spoken Words","user":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-02-02T15:21:48Z","updated_at":"2022-02-11T17:30:28Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3666","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3666","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3666.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3666.patch","merged_at":null},"body":"Add [Multillingual Spoken Words dataset](https:\/\/mlcommons.org\/en\/multilingual-spoken-words\/)\r\n\r\nYou can specify multiple languages for downloading \ud83d\ude0c:\r\n```python\r\nds = load_dataset(\"datasets\/ml_spoken_words\", languages=[\"ar\", \"tt\"])\r\n```\r\n\r\n1. I didn't take into account that each time you pass a set of languages the data for a specific language is downloaded even if it was downloaded before (since these are custom configs like `ar+tt` and `ar+tt+br`. Maybe that wasn't a good idea?\r\n2. The script will have to be slightly changed after merge of https:\/\/github.com\/huggingface\/datasets\/pull\/3664 \r\n2. Just can't figure out what wrong with dummy files... 
\ud83d\ude1e Maybe we should get rid of them at some point \ud83d\ude01","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3666\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3665","id":1121753385,"node_id":"PR_kwDODunzps4x9TnU","number":3665,"title":"Fix MP3 resampling when a dataset's audio files have different sampling rates","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-02T10:31:45Z","updated_at":"2022-02-02T10:52:26Z","closed_at":"2022-02-02T10:52:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3665","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3665","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3665.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3665.patch","merged_at":"2022-02-02T10:52:25Z"},"body":"The resampler needs to be updated if the `orig_freq` doesn't match the audio file sampling rate\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/3662","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3665\/timeline","performed_via_github_app":null} 
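PR 3665, whose record ends here, states the fix in one sentence: the resampler must be updated when `orig_freq` no longer matches the sampling rate of the file being decoded. A hedged sketch of that idea; the helper name and module-level caching are illustrative, not the actual `datasets` implementation:

```python
import torchaudio

# Cached across decodes, mirroring the stale-resampler behaviour described
# in issue 3662; the fix is to refresh it when the source rate changes.
_resampler = None


def resample(waveform, orig_freq: int, target_freq: int):
    """Resample, rebuilding the transform whenever the source rate changes."""
    global _resampler
    if (
        _resampler is None
        or _resampler.orig_freq != orig_freq
        or _resampler.new_freq != target_freq
    ):
        # A transform built for 32 kHz input must not be reused for 16 kHz files.
        _resampler = torchaudio.transforms.Resample(orig_freq, target_freq)
    return _resampler(waveform)
```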
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3664","id":1121233301,"node_id":"PR_kwDODunzps4x7mg_","number":3664,"title":"[WIP] Return local paths to Common Voice","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":16,"created_at":"2022-02-01T21:48:27Z","updated_at":"2022-02-11T23:32:08Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3664","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3664","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3664.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3664.patch","merged_at":null},"body":"Fixes https:\/\/github.com\/huggingface\/datasets\/issues\/3663\r\n\r\nThis is a proposed way of returning the old local file-based generator while keeping the new streaming generator intact.\r\n\r\nTODO:\r\n- [ ] brainstorm a bit more on https:\/\/github.com\/huggingface\/datasets\/issues\/3663 to see if we can do better\r\n- [ ] refactor the heck out of this PR to avoid completely copying the logic between the two generators","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3664\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3663","id":1121067647,"node_id":"I_kwDODunzps5C0iJ_","number":3663,"title":"[Audio] Path of Common Voice 
cannot be used for audio loading anymore","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlc
jE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2022-02-01T18:40:10Z","updated_at":"2022-02-08T16:05:18Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nfrom torchaudio import load\r\n\r\nds = load_dataset(\"common_voice\", \"ab\", split=\"train\")\r\n\r\n# both of the following commands fail at the moment\r\nload(ds[0][\"audio\"][\"path\"])\r\nload(ds[0][\"path\"])\r\n```\r\n\r\n## Expected results\r\n\r\nThe path should be the complete absolute path to the 
downloaded audio file not some relative path.\r\n\r\n\r\n## Actual results\r\n\r\n```bash\r\n~\/hugging_face\/venv_3.9\/lib\/python3.9\/site-packages\/torchaudio\/backend\/sox_io_backend.py in load(filepath, frame_offset, num_frames, normalize, channels_first, format)\r\n 150 filepath, frame_offset, num_frames, normalize, channels_first, format)\r\n 151 filepath = os.fspath(filepath)\r\n--> 152 return torch.ops.torchaudio.sox_io_load_audio_file(\r\n 153 filepath, frame_offset, num_frames, normalize, channels_first, format)\r\n 154\r\n\r\nRuntimeError: Error loading audio file: failed to open file cv-corpus-6.1-2020-12-11\/ab\/clips\/common_voice_ab_19904194.mp3\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.3.dev0\r\n- Platform: Linux-5.4.0-96-generic-x86_64-with-glibc2.27\r\n- Python version: 3.9.1\r\n- PyArrow version: 3.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3663\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3662","id":1121024403,"node_id":"I_kwDODunzps5C0XmT","number":3662,"title":"[Audio] MP3 resampling is incorrect when dataset's audio files have different sampling rates","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2022-02-01T17:55:04Z","updated_at":"2022-02-02T10:52:25Z","closed_at":"2022-02-02T10:52:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The Audio feature resampler for MP3 gets stuck with the first original frequencies it meets, which leads to subsequent decoding to be incorrect.\r\n\r\nHere is a code to reproduce the issue:\r\n\r\nLet's first consider two audio files with different sampling rates 32000 and 16000:\r\n```python\r\n# first download a mp3 file with sampling_rate=32000\r\n!wget 
https:\/\/file-examples-com.github.io\/uploads\/2017\/11\/file_example_MP3_700KB.mp3\r\n\r\nimport torchaudio\r\n\r\naudio_path = \"file_example_MP3_700KB.mp3\"\r\naudio_path2 = audio_path.replace(\".mp3\", \"_resampled.mp3\")\r\nresample = torchaudio.transforms.Resample(32000, 16000) # create a new file with sampling_rate=16000\r\ntorchaudio.save(audio_path2, resample(torchaudio.load(audio_path)[0]), 16000)\r\n```\r\n\r\nThen we can see an issue here when decoding:\r\n```python\r\nfrom datasets import Dataset, Audio\r\n\r\ndataset = Dataset.from_dict({\"audio\": [audio_path, audio_path2]}).cast_column(\"audio\", Audio(48000))\r\ndataset[0] # decode the first audio file sets the resampler orig_freq to 32000\r\nprint(dataset .features[\"audio\"]._resampler.orig_freq)\r\n# 32000\r\nprint(dataset[0][\"audio\"][\"array\"].shape) # here decoding is fine\r\n# (1308096,)\r\n\r\ndataset = Dataset.from_dict({\"audio\": [audio_path, audio_path2]}).cast_column(\"audio\", Audio(48000))\r\ndataset[1] # decode the second audio file sets the resampler orig_freq to 16000\r\nprint(dataset .features[\"audio\"]._resampler.orig_freq)\r\n# 16000\r\nprint(dataset[0][\"audio\"][\"array\"].shape) # here decoding uses orig_freq=16000 instead of 32000\r\n# (2616192,)\r\n```\r\n\r\nThe value of `orig_freq` doesn't change no matter what file needs to be decoded\r\n\r\ncc @patrickvonplaten @anton-l @cahya-wirawan @albertvillanova \r\n\r\nThe issue seems to be here in `Audio.decode_mp3`:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/4c417d52def6e20359ca16c6723e0a2855e5c3fd\/src\/datasets\/features\/audio.py#L176-L180","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3662\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3661","id":1121000251,"node_id":"PR_kwDODunzps4x61ad","number":3661,"title":"Remove unnecessary 'r' arg 
in","user":{"login":"bryant1410","id":3905501,"node_id":"MDQ6VXNlcjM5MDU1MDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3905501?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bryant1410","html_url":"https:\/\/github.com\/bryant1410","followers_url":"https:\/\/api.github.com\/users\/bryant1410\/followers","following_url":"https:\/\/api.github.com\/users\/bryant1410\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bryant1410\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bryant1410\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bryant1410\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bryant1410\/orgs","repos_url":"https:\/\/api.github.com\/users\/bryant1410\/repos","events_url":"https:\/\/api.github.com\/users\/bryant1410\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bryant1410\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-02-01T17:29:27Z","updated_at":"2022-02-07T16:57:27Z","closed_at":"2022-02-07T16:02:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3661","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3661","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3661.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3661.patch","merged_at":"2022-02-07T16:02:42Z"},"body":"Originally from #3489","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3661\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3660","id":1120982671,"node_id":"PR_kwDODunzps4x6xr8","number":3660,"title":"Change HTTP links to 
HTTPS","user":{"login":"bryant1410","id":3905501,"node_id":"MDQ6VXNlcjM5MDU1MDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3905501?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bryant1410","html_url":"https:\/\/github.com\/bryant1410","followers_url":"https:\/\/api.github.com\/users\/bryant1410\/followers","following_url":"https:\/\/api.github.com\/users\/bryant1410\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bryant1410\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bryant1410\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bryant1410\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bryant1410\/orgs","repos_url":"https:\/\/api.github.com\/users\/bryant1410\/repos","events_url":"https:\/\/api.github.com\/users\/bryant1410\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bryant1410\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-01T17:12:51Z","updated_at":"2022-02-01T18:34:47Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3660","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3660","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3660.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3660.patch","merged_at":null},"body":"I tested the links. I also fixed some typos.\r\n\r\nOriginally from #3489","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3660\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3659","id":1120913672,"node_id":"I_kwDODunzps5Cz8kI","number":3659,"title":"push_to_hub but preview not 
working","user":{"login":"thomas-happify","id":66082334,"node_id":"MDQ6VXNlcjY2MDgyMzM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/66082334?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomas-happify","html_url":"https:\/\/github.com\/thomas-happify","followers_url":"https:\/\/api.github.com\/users\/thomas-happify\/followers","following_url":"https:\/\/api.github.com\/users\/thomas-happify\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomas-happify\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomas-happify\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomas-happify\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomas-happify\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomas-happify\/repos","events_url":"https:\/\/api.github.com\/users\/thomas-happify\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomas-happify\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-02-01T16:23:57Z","upd
ated_at":"2022-02-09T08:00:37Z","closed_at":"2022-02-09T08:00:37Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*happifyhealth\/twitter_pnn*'\r\n\r\n**Link:** *[link to the dataset viewer page](https:\/\/huggingface.co\/datasets\/happifyhealth\/twitter_pnn)*\r\n\r\nI used \r\n```\r\ndataset.push_to_hub(\"happifyhealth\/twitter_pnn\")\r\n```\r\nbut the preview is not working.\r\n\r\nAm I the one who added this dataset ? Yes\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3659\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3658","id":1120880395,"node_id":"I_kwDODunzps5Cz0cL","number":3658,"title":"Dataset viewer issue for *P3*","user":{"login":"jeffistyping","id":22351555,"node_id":"MDQ6VXNlcjIyMzUxNTU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22351555?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jeffistyping","html_url":"https:\/\/github.com\/jeffistyping","followers_url":"https:\/\/api.github.com\/users\/jeffistyping\/followers","following_url":"https:\/\/api.github.com\/users\/jeffistyping\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jeffistyping\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jeffistyping\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jeffistyping\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jeffistyping\/orgs","repos_url":"https:\/\/api.github.com\/users\/jeffistyping\/repos","events_url":"https:\/\/api.github.com\/users\/jeffistyping\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jeffistyping\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-02-01T15:57:56Z","updated_at":"2022-02-01T15:57:56Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*P3*'\r\n\r\n**Link: https:\/\/huggingface.co\/datasets\/bigscience\/P3**\r\n\r\n```\r\nStatus code: 400\r\nException: SplitsNotFoundError\r\nMessage: The split names could not be parsed from the dataset config.\r\n```\r\nAm I the one who added this dataset ? 
No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3658\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3657","id":1120602620,"node_id":"PR_kwDODunzps4x5f1I","number":3657,"title":"Extend dataset builder for streaming in `get_dataset_split_names`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-02-01T12:21:24Z","updated_at":"2022-02-03T22:49:06Z","closed_at":"2022-02-02T11:22:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3657","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3657","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3657.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3657.patch","merged_at":"2022-02-02T11:22:01Z"},"body":"Currently, `get_dataset_split_names` doesn't extend a builder module to support streaming, even though it uses `StreamingDownloadManager` to download data. 
This PR fixes that.\r\n\r\nTo test the change, run the following:\r\n```bash\r\npip install git+https:\/\/github.com\/huggingface\/datasets.git@fix-get_dataset_split_names-streaming\r\npython -c \"from datasets import get_dataset_split_names; print(get_dataset_split_names('facebook\/multilingual_librispeech', 'german', download_mode='force_redownload', revision='137923f945552c6afdd8b60e4a7b43e3088972c1'))\"\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3657\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3656","id":1120510823,"node_id":"I_kwDODunzps5CyaNn","number":3656,"title":"checksum error subjqa dataset","user":{"login":"RensDimmendaal","id":9828683,"node_id":"MDQ6VXNlcjk4Mjg2ODM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9828683?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RensDimmendaal","html_url":"https:\/\/github.com\/RensDimmendaal","followers_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/followers","following_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/orgs","repos_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/repos","events_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RensDimmendaal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-02-01T10:53:33Z","updated_at":"2022-02-10T10:56:59Z","closed_at":"2022-02-10T10:56:38Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nI get a checksum error when loading the `subjqa` dataset (used in the transformers book).\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nsubjqa = load_dataset(\"subjqa\",\"electronics\")\r\n```\r\n\r\n## Expected results\r\nLoading the dataset\r\n\r\n## Actual results\r\n\r\n```\r\n---------------------------------------------------------------------------\r\n\r\nNonMatchingChecksumError Traceback (most recent call last)\r\n\r\n in ()\r\n 2 from datasets import load_dataset\r\n 3 \r\n----> 4 subjqa = load_dataset(\"subjqa\",\"electronics\")\r\n\r\n3 frames\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 38 if len(bad_urls) > 0:\r\n 39 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 40 raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 41 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 42 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source 
files:\r\n['https:\/\/github.com\/lewtun\/SubjQA\/archive\/refs\/heads\/master.zip']\r\n```\r\n\r\n## Environment info\r\n\r\nGoogle colab\r\n\r\n- `datasets` version: 1.18.2\r\n- Platform: Linux-5.4.144+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 3.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3656\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3655","id":1119801077,"node_id":"I_kwDODunzps5Cvs71","number":3655,"title":"Pubmed dataset not reachable","user":{"login":"abhi-mosaic","id":77638579,"node_id":"MDQ6VXNlcjc3NjM4NTc5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/77638579?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhi-mosaic","html_url":"https:\/\/github.com\/abhi-mosaic","followers_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/followers","following_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/repos","events_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhi-mosaic\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-01-31T18:45:47Z","updated_at":"2022-02-11T15:54:06Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nTrying to use the `pubmed` dataset fails to reach \/ download the source files.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\npubmed_train = datasets.load_dataset('pubmed', split='train')\r\n```\r\n\r\n## Expected results\r\nShould begin downloading the pubmed dataset.\r\n\r\n## Actual results\r\n```\r\nConnectionError: Couldn't reach ftp:\/\/ftp.ncbi.nlm.nih.gov\/pubmed\/baseline\/pubmed21n0865.xml.gz (InvalidSchema(\"No connection adapters were found for 'ftp:\/\/ftp.ncbi.nlm.nih.gov\/pubmed\/baseline\/pubmed21n0865.xml.gz'\"))\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.18.2\r\n- Platform: macOS-11.4-x86_64-i386-64bit\r\n- Python version: 3.8.2\r\n- PyArrow version: 6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3655\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3654","id":1119717475,"node_id":"PR_kwDODunzps4x2kiX","number":3654,"title":"Better TQDM output","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-31T17:22:43Z","updated_at":"2022-02-03T15:55:34Z","closed_at":"2022-02-03T15:55:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3654","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3654","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3654.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3654.patch","merged_at":"2022-02-03T15:55:33Z"},"body":"This PR does the following:\r\n* if `dataset_infos.json` exists for a dataset, uses `num_examples` to print the total number of examples that needs to be generated (in `builder.py`)\r\n* fixes `tqdm` + multiprocessing in Jupyter Notebook\/Colab (the issue stems from this commit in the `tqdm` repo: https:\/\/github.com\/tqdm\/tqdm\/commit\/f7722edecc3010cb35cc1c923ac4850a76336f82) \r\n* adds the missing `drop_last_batch` and `with_ranks` params to `DatasetDict.map` \r\n* correctly computes the number of iterations in `map` and the CSV\/JSON loader when `batched=True` to fix `tqdm` progress bars\r\n* removes the `bool(logging.get_verbosity() == logging.NOTSET)` (or simplifies `bool(logging.get_verbosity() == logging.NOTSET) or not utils.is_progress_bar_enabled()` to `not utils.is_progress_bar_enabled()`) condition and uses `utils.is_progress_bar_enabled` to check if `tqdm` output is enabled (this comment from @stas00 explains why the `bool(logging.get_verbosity() == logging.NOTSET)` check is problematic: https:\/\/github.com\/huggingface\/transformers\/issues\/14889#issue-1087318463)\r\n\r\nFix 
#2630","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3654\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3653","id":1119186952,"node_id":"I_kwDODunzps5CtXAI","number":3653,"title":"`to_json` in multiprocessing fashion sometimes deadlock","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-31T09:35:07Z","updated_at":"2022-01-31T09:35:07Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\n`to_json` in multiprocessing fashion sometimes deadlock, instead of raising exceptions. Temporary solution is to see that it deadlocks, and then reduce the number of processes or batch size in order to reduce the memory footprint.\r\n\r\nAs @lhoestq pointed out, this might be related to https:\/\/bugs.python.org\/issue22393#msg315684 where `multiprocessing` fails to raise the OOM exception. 
One suggested alternative is not use `concurrent.futures` instead.\r\n\r\n## Steps to reproduce the bug\r\n\r\n## Expected results\r\n\r\nScript fails when one worker hits OOM, and raise appropriate error.\r\n\r\n## Actual results\r\n\r\nDeadlock\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.1\r\n- Platform: Linux\r\n- Python version: 3.8\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3653\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3652","id":1118808738,"node_id":"PR_kwDODunzps4xzinr","number":3652,"title":"sp. Columbia => Colombia","user":{"login":"serapio","id":3781280,"node_id":"MDQ6VXNlcjM3ODEyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3781280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/serapio","html_url":"https:\/\/github.com\/serapio","followers_url":"https:\/\/api.github.com\/users\/serapio\/followers","following_url":"https:\/\/api.github.com\/users\/serapio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/serapio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/serapio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/serapio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/serapio\/orgs","repos_url":"https:\/\/api.github.com\/users\/serapio\/repos","events_url":"https:\/\/api.github.com\/users\/serapio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/serapio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-31T00:41:03Z","updated_at":"2022-02-09T16:55:25Z","closed_at":"2022-01-31T08:29:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3652","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3652","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3652.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3652.patch","merged_at":"2022-01-31T08:29:07Z"},"body":"\"Columbia\" is various places in North America. 
The country is \"Colombia\".","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3652\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3651","id":1118597647,"node_id":"PR_kwDODunzps4xy3De","number":3651,"title":"Update link in wiki_bio dataset","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-30T16:28:54Z","updated_at":"2022-01-31T14:50:48Z","closed_at":"2022-01-31T08:38:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3651","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3651","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3651.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3651.patch","merged_at":"2022-01-31T08:38:09Z"},"body":"Fixes #3580 and makes the wiki_bio dataset work again. I changed the link and some documentation, and all the tests pass. 
Thanks @lhoestq for uploading the dataset to the HuggingFace data bucket.\r\n\r\n@lhoestq -- all the tests pass, but I'm still not able to import the dataset, as the old Google Drive link is cached somewhere:\r\n```python\r\n>>> from datasets import load_dataset\r\nload_dataset(\"wiki_bio>>> load_dataset(\"wiki_bio\")\r\nUsing custom data configuration default\r\nDownloading and preparing dataset wiki_bio\/default (download: 318.53 MiB, generated: 736.94 MiB, post-processed: Unknown size, total: 1.03 GiB) to \/home\/jxm3\/.cache\/huggingface\/datasets\/wiki_bio\/default\/1.1.0\/5293ce565954ba965dada626f1e79684e98172d950371d266bf3caaf87e911c9...\r\nTraceback (most recent call last):\r\n ...\r\n File \"\/home\/jxm3\/random\/datasets\/src\/datasets\/utils\/file_utils.py\", line 612, in get_from_cache\r\n raise FileNotFoundError(f\"Couldn't find file at {url}\")\r\nFileNotFoundError: Couldn't find file at https:\/\/drive.google.com\/uc?export=download&id=1L7aoUXzHPzyzQ0ns4ApBbYepsjFOtXil\r\n```\r\n\r\nwhat do I have to do to invalidate the cache and actually import the dataset? It's clearly set up correctly, since the data is downloaded and processed by the tests.\r\n\r\nAs an aside, this caching-loading-scripts behavior makes for a really bad developer experience. I just wasted an hour trying to figure out where the caching was happening and how to disable it, and I don't know. All I wanted to do was update the link and submit a pull request! I recommend that you all either change this behavior (i.e. updating the link to a dataset should \"just work\") or document it, since I couldn't find any information about this in the contributing.md or readme or anywhere else! Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3651\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3650","id":1118537429,"node_id":"PR_kwDODunzps4xyr2o","number":3650,"title":"Allow 'to_json' to run in unordered fashion in order to lower memory 
footprint","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-01-30T13:23:19Z","updated_at":"2022-02-01T17:49:21Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3650","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3650","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3650.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3650.patch","merged_at":null},"body":"I'm using `to_json(..., num_proc=num_proc, compressiong='gzip')` with `num_proc>1`. I'm having an issue where things seem to deadlock at some point. Eventually I see OOM. I'm guessing it's an issue where one process starts to take a long time for a specific batch, and so other process keep accumulating their results in memory.\r\n\r\nIn order to flush memory, I propose we use optional `imap_unordered`. This will prevent one process to block the other ones. 
The logical thinking is that index are rarily relevant, and in one wants to keep an index, one can still create another column and reconstruct from there.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3650\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3649","id":1117502250,"node_id":"I_kwDODunzps5Cm7sq","number":3649,"title":"Add IGLUE dataset","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608944167,"node_id":"LA_kwDODunzps7XHB4n","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/multimodal","name":"multimodal","color":"19E633","default":false,"description":"Multimodal datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-28T14:59:41Z","updated_at":"2022-01-28T15:02:35Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** IGLUE\r\n- **Description:** IGLUE brings together 4 vision-and-language tasks across 20 languages (Twitter [thread](https:\/\/twitter.com\/ebugliarello\/status\/1487045497583976455?s=20&t=SB4LZGDhhkUW83ugcX_m5w))\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2201.11732\r\n- **Data:** https:\/\/github.com\/e-bug\/iglue\r\n- **Motivation:** This dataset would provide a nice example of combining the text and image features of `datasets` together for multimodal applications.\r\n\r\nNote: the data \/ code are not yet visible on the GitHub repo, so I've pinged the authors for more information.\r\n\r\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3649\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3648","id":1117465505,"node_id":"PR_kwDODunzps4xvXig","number":3648,"title":"Fix Windows CI: bump python to 3.7","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-28T14:24:54Z","updated_at":"2022-01-28T14:40:39Z","closed_at":"2022-01-28T14:40:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3648","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3648","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3648.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3648.patch","merged_at":"2022-01-28T14:40:39Z"},"body":"Python>=3.7 is needed to install `tokenizers` 0.11","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3648\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3647","id":1117383675,"node_id":"PR_kwDODunzps4xvGDQ","number":3647,"title":"Fix `add_column` on datasets with indices mapping","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-28T13:06:29Z","updated_at":"2022-01-28T15:35:58Z","closed_at":"2022-01-28T15:35:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3647","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3647","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3647.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3647.patch","merged_at":"2022-01-28T15:35:57Z"},"body":"My initial idea was to avoid the `flatten_indices` call and reorder a new column instead, but in the end I decided to follow `concatenate_datasets` and use `flatten_indices` to avoid padding when `dataset._indices.num_rows != dataset._data.num_rows`.\r\n\r\nFix #3599","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3647\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3646","id":1116544627,"node_id":"PR_kwDODunzps4xsX66","number":3646,"title":"Fix streaming datasets that are not reset 
correctly","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-27T17:21:02Z","updated_at":"2022-01-28T16:34:29Z","closed_at":"2022-01-28T16:34:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3646","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3646","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3646.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3646.patch","merged_at":"2022-01-28T16:34:28Z"},"body":"Streaming datasets that use `StreamingDownloadManager.iter_archive` and `StreamingDownloadManager.iter_files` had some issues. Indeed if you try to iterate over such dataset twice, then the second time it will be empty.\r\n\r\nThis is because the two methods above are generator functions. 
I fixed this by making them return iterables that are reset properly instead.\r\n\r\nClose https:\/\/github.com\/huggingface\/datasets\/issues\/3645\r\ncc @anton-l ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3646\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3645","id":1116541298,"node_id":"I_kwDODunzps5CjRFy","number":3645,"title":"Streaming dataset based on dl_manager.iter_archive\/iter_files are not reset correctly","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gist
s_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-27T17:17:41Z","updated_at":"2022-01-28T16:34:28Z","closed_at":"2022-01-28T16:34:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi ! When iterating over a streaming dataset once, it's not reset correctly because of some issues with `dl_manager.iter_archive` and `dl_manager.iter_files`. Indeed they are generator functions (so the iterator that is returned can be exhausted). They should be iterables instead, and be reset if we do a for loop again:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nd = load_dataset(\"common_voice\", \"ab\", split=\"test\", streaming=True)\r\n\r\ni = 0\r\nfor i, _ in enumerate(d):\r\n pass\r\nprint(i) # 8\r\n# let's do it again\r\ni = 0\r\nfor i, _ in enumerate(d):\r\n pass\r\nprint(i) # 0\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3645\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3644","id":1116519670,"node_id":"I_kwDODunzps5CjLz2","number":3644,"title":"Add a GROUP BY 
operator","user":{"login":"felix-schneider","id":208336,"node_id":"MDQ6VXNlcjIwODMzNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/208336?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/felix-schneider","html_url":"https:\/\/github.com\/felix-schneider","followers_url":"https:\/\/api.github.com\/users\/felix-schneider\/followers","following_url":"https:\/\/api.github.com\/users\/felix-schneider\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/felix-schneider\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/felix-schneider\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/felix-schneider\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/felix-schneider\/orgs","repos_url":"https:\/\/api.github.com\/users\/felix-schneider\/repos","events_url":"https:\/\/api.github.com\/users\/felix-schneider\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/felix-schneider\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-27T16:57:54Z","updated_at":"2022-02-08T15:06:10Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nUsing batch mapping, we can easily split examples. However, we lack an appropriate option for merging them back together by some key. Consider this example:\r\n\r\n```python\r\n# features:\r\n# {\r\n# \"example_id\": datasets.Value(\"int32\"),\r\n# \"text\": datasets.Value(\"string\")\r\n# }\r\n\r\nds = datasets.Dataset()\r\n\r\n\r\ndef split(examples):\r\n sentences = [text.split(\".\") for text in examples[\"text\"]]\r\n return {\r\n \"example_id\": [\r\n example_id\r\n for example_id, sents in zip(examples[\"example_id\"], sentences)\r\n for _ in sents\r\n ],\r\n \"sentence\": [sent for sents in sentences for sent in sents],\r\n \"sentence_id\": [i for sents in sentences for i in range(len(sents))],\r\n }\r\n\r\n\r\nsplit_ds = ds.map(split, batched=True)\r\n\r\n\r\ndef process(examples):\r\n outputs = some_neural_network_that_works_on_sentences(examples[\"sentence\"])\r\n return {\"outputs\": outputs}\r\n\r\n\r\nsplit_ds = split_ds.map(process, batched=True)\r\n```\r\n\r\nI have a dataset consisting of texts that I would like to process sentence by sentence in a batched way. 
Afterwards, I would like to put it back together as it was, merging the outputs together.\r\n\r\n**Describe the solution you'd like**\r\nIdeally, it would look something like this:\r\n\r\n```python\r\ndef join(examples):\r\n order = np.argsort(examples[\"sentence_id\"])\r\n text = \".\".join(examples[\"text\"][i] for i in order)\r\n outputs = [examples[\"outputs\"][i] for i in order]\r\n return {\"text\": text, \"outputs\": outputs}\r\n\r\n\r\nds = split_ds.group_by(\"example_id\", join)\r\n```\r\n\r\n**Describe alternatives you've considered**\r\nRight now, we can do this:\r\n```python\r\ndef merge(example):\r\n meeting_id = example[\"example_id\"]\r\n parts = split_ds.filter(lambda x: x[\"example_id\"] == meeting_id).sort(\"segment_no\")\r\n return {\"outputs\": list(parts[\"outputs\"])}\r\n\r\nds = ds.map(merge)\r\n```\r\n\r\nOf course, we could process the dataset like this:\r\n\r\n```python\r\ndef process(example):\r\n outputs = some_neural_network_that_works_on_sentences(example[\"text\"].split(\".\"))\r\n return {\"outputs\": outputs}\r\n\r\nds = ds.map(process, batched=True)\r\n```\r\n\r\nHowever, that does not allow using an arbitrary batch size and may lead to very inefficient use of resources if the batch size is much larger than the number of sentences in one example.\r\n\r\nI would very much appreciate some kind of group by operator to merge examples based on the value of one column.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3644\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3643","id":1116417428,"node_id":"PR_kwDODunzps4xr8mX","number":3643,"title":"Fix sem_eval_2018_task_1 download 
location","user":{"login":"maxpel","id":31095360,"node_id":"MDQ6VXNlcjMxMDk1MzYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31095360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxpel","html_url":"https:\/\/github.com\/maxpel","followers_url":"https:\/\/api.github.com\/users\/maxpel\/followers","following_url":"https:\/\/api.github.com\/users\/maxpel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxpel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxpel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxpel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxpel\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxpel\/repos","events_url":"https:\/\/api.github.com\/users\/maxpel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxpel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-27T15:45:00Z","updated_at":"2022-02-04T15:15:26Z","closed_at":"2022-02-04T15:15:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3643","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3643","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3643.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3643.patch","merged_at":"2022-02-04T15:15:26Z"},"body":"As discussed with @lhoestq in https:\/\/github.com\/huggingface\/datasets\/issues\/3549#issuecomment-1020176931_ this is the new pull request to fix the download location.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3643\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3642","id":1116306986,"node_id":"PR_kwDODunzps4xrj2S","number":3642,"title":"Fix dataset slicing with negative bounds when indices mapping is not 
`None`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-27T14:45:53Z","updated_at":"2022-01-27T18:16:23Z","closed_at":"2022-01-27T18:16:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3642","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3642","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3642.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3642.patch","merged_at":"2022-01-27T18:16:22Z"},"body":"Fix #3611 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3642\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3641","id":1116284268,"node_id":"PR_kwDODunzps4xre7C","number":3641,"title":"Fix numpy rngs when seed is 
None","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-27T14:29:09Z","updated_at":"2022-01-27T18:16:08Z","closed_at":"2022-01-27T18:16:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3641","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3641","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3641.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3641.patch","merged_at":"2022-01-27T18:16:07Z"},"body":"Fixes the NumPy RNG when `seed` is `None`.\r\n\r\nThe problem becomes obvious after reading the NumPy notes on RNG (returned by `np.random.get_state()`):\r\n> The MT19937 state vector consists of a 624-element array of 32-bit unsigned integers plus a single integer value between 0 and 624 that indexes the current position within the main array.\r\n\r\n`The MT19937 state vector`: the seed which we currently index, but this value stays the same for multiple rounds.\r\n`plus a single integer value`: the `pos` value in this PR (is 624 if `seed` is set to a fixed value with `np.random.seed`, so we take the first value in the `seed` array returned by `np.random.get_state()`: https:\/\/stackoverflow.com\/questions\/32172054\/how-can-i-retrieve-the-current-seed-of-numpys-random-number-generator)\r\n\r\nNumPy notes: https:\/\/numpy.org\/doc\/stable\/reference\/random\/bit_generators\/mt19937.html\r\n\r\nFix #3634 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3641\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3640","id":1116133769,"node_id":"I_kwDODunzps5ChtmJ","number":3640,"title":"Issues with custom dataset in 
Wav2Vec2","user":{"login":"peregilk","id":9079808,"node_id":"MDQ6VXNlcjkwNzk4MDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9079808?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/peregilk","html_url":"https:\/\/github.com\/peregilk","followers_url":"https:\/\/api.github.com\/users\/peregilk\/followers","following_url":"https:\/\/api.github.com\/users\/peregilk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/peregilk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/peregilk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/peregilk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/peregilk\/orgs","repos_url":"https:\/\/api.github.com\/users\/peregilk\/repos","events_url":"https:\/\/api.github.com\/users\/peregilk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/peregilk\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-27T12:09:05Z","updated_at":"2022-01-27T12:29:48Z","closed_at":"2022-01-27T12:29:48Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"We are training Vav2Vec using the run_speech_recognition_ctc_bnb.py-script.\r\n\r\nThis is working fine with Common Voice, however using our custom dataset and data loader at [NbAiLab\/NPSC]( https:\/\/huggingface.co\/datasets\/NbAiLab\/NPSC) it crashes after roughly 1 epoch with the following stack trace:\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/9079808\/151355893-6d5887cc-ca19-4b12-948a-124eb6dac372.png)\r\n\r\n\r\nWe are able to work around the issue, for instance by adding this check in line#222 in transformers\/models\/wav2vec2\/modeling_wav2vec2.py:\r\n```python\r\nif input_length - (mask_length - 1) < num_masked_span:\r\n num_masked_span = input_length - (mask_length - 1)\r\n```\r\nInterestingly, these are the variable values before the adjustment:\r\n```\r\ninput_length=10\r\nmask_length=10\r\nnum_masked_span=2\r\n````\r\nAfter adjusting num_masked_spin to 1, the training script runs. The issue is also fixed by setting \u201creplace=True\u201d in the same function.\r\n\r\nDo you have any idea what is causing this, and how to fix this error permanently? 
If you do not think this is an Datasets issue, feel free to move the issue.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3640\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3639","id":1116021420,"node_id":"I_kwDODunzps5ChSKs","number":3639,"title":"same value of precision, recall, f1 score at each epoch for classification task. ","user":{"login":"Dhanachandra","id":10828657,"node_id":"MDQ6VXNlcjEwODI4NjU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10828657?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dhanachandra","html_url":"https:\/\/github.com\/Dhanachandra","followers_url":"https:\/\/api.github.com\/users\/Dhanachandra\/followers","following_url":"https:\/\/api.github.com\/users\/Dhanachandra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dhanachandra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dhanachandra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dhanachandra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dhanachandra\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dhanachandra\/repos","events_url":"https:\/\/api.github.com\/users\/Dhanachandra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dhanachandra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-27T10:14:16Z","updated_at":"2022-02-09T16:11:49Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**1st Epoch:** \r\n1\/27\/2022 09:30:48 - INFO - datasets.metric - Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/f1\/default\/default_experiment-1-0.arrow.59it\/s]\r\n01\/27\/2022 09:30:48 - INFO - datasets.metric - Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/precision\/default\/default_experiment-1-0.arrow\r\n01\/27\/2022 09:30:49 - INFO - datasets.metric - Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/recall\/default\/default_experiment-1-0.arrow\r\nPRECISION: {'precision': 0.7612903225806451}\r\nRECALL: {'recall': 0.7612903225806451}\r\nF1: {'f1': 0.7612903225806451}\r\n{'eval_loss': 1.4658324718475342, 'eval_accuracy': 0.7612903118133545, 'eval_runtime': 30.0054, 'eval_samples_per_second': 46.492, 'eval_steps_per_second': 46.492, 'epoch': 3.0} \r\n**4th Epoch:**\r\n1\/27\/2022 09:56:55 - INFO - datasets.metric - Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/f1\/default\/default_experiment-1-0.arrow.92it\/s]\r\n01\/27\/2022 09:56:56 - INFO - datasets.metric - Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/precision\/default\/default_experiment-1-0.arrow\r\n01\/27\/2022 09:56:56 - INFO - datasets.metric - 
Removing \/home\/ubuntu\/.cache\/huggingface\/metrics\/recall\/default\/default_experiment-1-0.arrow\r\nPRECISION: {'precision': 0.7698924731182796}\r\nRECALL: {'recall': 0.7698924731182796}\r\nF1: {'f1': 0.7698924731182796}\r\n\r\n\r\n## Environment info\r\n!git clone https:\/\/github.com\/huggingface\/transformers\r\n%cd transformers\r\n!pip install .\r\n!pip install -r \/content\/transformers\/examples\/pytorch\/token-classification\/requirements.txt\r\n!pip install datasets","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3639\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3638","id":1115725703,"node_id":"I_kwDODunzps5CgJ-H","number":3638,"title":"AutoTokenizer hash value got change after datasets.map","user":{"login":"tshu-w","id":13161779,"node_id":"MDQ6VXNlcjEzMTYxNzc5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13161779?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tshu-w","html_url":"https:\/\/github.com\/tshu-w","followers_url":"https:\/\/api.github.com\/users\/tshu-w\/followers","following_url":"https:\/\/api.github.com\/users\/tshu-w\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tshu-w\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tshu-w\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tshu-w\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tshu-w\/orgs","repos_url":"https:\/\/api.github.com\/users\/tshu-w\/repos","events_url":"https:\/\/api.github.com\/users\/tshu-w\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tshu-w\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":9,"created_at":"2022-01-27T03:19:03Z","updated_at":"2022-01-28T03:20:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAutoTokenizer hash value got change after datasets.map\r\n\r\n## Steps to reproduce the bug\r\n1. trash huggingface datasets cache\r\n2. 
run the following code:\r\n```python\r\nfrom transformers import AutoTokenizer, BertTokenizer\r\nfrom datasets import load_dataset\r\nfrom datasets.fingerprint import Hasher\r\ntokenizer = AutoTokenizer.from_pretrained('bert-base-uncased')\r\n\r\ndef tokenize_function(example):\r\n return tokenizer(example[\"sentence1\"], example[\"sentence2\"], truncation=True)\r\n\r\nraw_datasets = load_dataset(\"glue\", \"mrpc\")\r\n\r\nprint(Hasher.hash(tokenize_function))\r\nprint(Hasher.hash(tokenizer))\r\n\r\ntokenized_datasets = raw_datasets.map(tokenize_function, batched=True)\r\n\r\nprint(Hasher.hash(tokenize_function))\r\nprint(Hasher.hash(tokenizer))\r\n```\r\ngot\r\n```\r\nReusing dataset glue (\/home1\/wts\/.cache\/huggingface\/datasets\/glue\/mrpc\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 3\/3 [00:00<00:00, 1112.35it\/s]\r\nf4976bb4694ebc51\r\n3fca35a1fd4a1251\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 4\/4 [00:00<00:00, 
6.96ba\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 15.25ba\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2\/2 [00:00<00:00, 5.81ba\/s]\r\nd32837619b7d7d01\r\n5fd925c82edd62b6\r\n```\r\n3. 
run raw_datasets.map(tokenize_function, batched=True) again and see some dataset are not using cache.\r\n\r\n## Expected results\r\n`AutoTokenizer` work like specific Tokenizer (The hash value don't change after map):\r\n```python\r\nfrom transformers import AutoTokenizer, BertTokenizer\r\nfrom datasets import load_dataset\r\nfrom datasets.fingerprint import Hasher\r\ntokenizer = BertTokenizer.from_pretrained('bert-base-uncased')\r\n\r\ndef tokenize_function(example):\r\n return tokenizer(example[\"sentence1\"], example[\"sentence2\"], truncation=True)\r\n\r\nraw_datasets = load_dataset(\"glue\", \"mrpc\")\r\n\r\nprint(Hasher.hash(tokenize_function))\r\nprint(Hasher.hash(tokenizer))\r\n\r\ntokenized_datasets = raw_datasets.map(tokenize_function, batched=True)\r\n\r\nprint(Hasher.hash(tokenize_function))\r\nprint(Hasher.hash(tokenizer))\r\n```\r\n\r\n```\r\nReusing dataset glue (\/home1\/wts\/.cache\/huggingface\/datasets\/glue\/mrpc\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 3\/3 [00:00<00:00, 1091.22it\/s]\r\n46d4b31f54153fc7\r\n5b8771afd8d43888\r\nLoading cached processed dataset at \/home1\/wts\/.cache\/huggingface\/datasets\/glue\/mrpc\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad\/cache-6b07ff82ae9d5c51.arrow\r\nLoading cached processed dataset at \/home1\/wts\/.cache\/huggingface\/datasets\/glue\/mrpc\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad\/cache-af738a6d84f3864b.arrow\r\nLoading cached processed dataset at \/home1\/wts\/.cache\/huggingface\/datasets\/glue\/mrpc\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad\/cache-531d2a603ba713c1.arrow\r\n46d4b31f54153fc7\r\n5b8771afd8d43888\r\n```\r\n\r\n\r\n## Environment info\r\n- `datasets` version: 1.18.0\r\n- Platform: Linux-5.4.0-91-generic-x86_64-with-glibc2.27\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3638\/timeline","performed_via_github_app":null} 
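The record above (issue 3638) shows both `Hasher.hash(tokenize_function)` and `Hasher.hash(tokenizer)` changing after one `map` call, which is what breaks cache reuse, since `map` derives its cache fingerprint from the hash of the function it is given, and that hash covers the tokenizer the function references. Below is a minimal diagnostic sketch, not part of the issue thread itself; it assumes only the `datasets` and `transformers` packages plus the `bert-base-uncased` checkpoint and the `glue`/`mrpc` dataset already used in the reproduction, and simply reports whether the two hashes stay stable across a single `map` pass:

```python
# Diagnostic sketch (not from the issue thread): check whether the hashes that
# drive `map` caching stay stable across one pass.
from datasets import load_dataset
from datasets.fingerprint import Hasher
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")

def tokenize_function(example):
    # `tokenizer` is referenced here, so its state feeds into this function's hash.
    return tokenizer(example["sentence1"], example["sentence2"], truncation=True)

raw_datasets = load_dataset("glue", "mrpc")

before = (Hasher.hash(tokenize_function), Hasher.hash(tokenizer))
raw_datasets.map(tokenize_function, batched=True)
after = (Hasher.hash(tokenize_function), Hasher.hash(tokenizer))

# If either comparison prints False, a second identical .map() call gets a new
# fingerprint and recomputes instead of loading the cached Arrow files.
print("function hash stable:", before[0] == after[0])
print("tokenizer hash stable:", before[1] == after[1])
```

Per the logs in the report, the same check with `BertTokenizer` (the "Expected results" section) keeps both hashes unchanged, which is the behaviour the reporter expects from `AutoTokenizer` as well.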
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3637","id":1115526438,"node_id":"I_kwDODunzps5CfZUm","number":3637,"title":"[TypeError: Couldn't cast array of type] Cannot load dataset in v1.18","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2022-01-26T21:38:02Z","updated_at":"2022-02-09T16:15:53Z","closed_at":"2022-02-09T16:15:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to load the [`GEM\/RiSAWOZ` dataset](https:\/\/huggingface.co\/datasets\/GEM\/RiSAWOZ) in `datasets` v1.18.1 and am running into a type error when casting the features. The strange thing is that I can load the dataset with v1.17.0. Note that the error is also present if I install from `master` too.\r\n\r\nAs far as I can tell, the dataset loading script is correct and the problematic features [here](https:\/\/huggingface.co\/datasets\/GEM\/RiSAWOZ\/blob\/main\/RiSAWOZ.py#L237) also look fine to me.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndset = load_dataset(\"GEM\/RiSAWOZ\")\r\n```\r\n\r\n## Expected results\r\nI can load the dataset without error.\r\n\r\n## Actual results\r\n\r\n
Traceback<\/summary>\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/builder.py in _prepare_split(self, split_generator)\r\n 1083 example = self.info.features.encode_example(record)\r\n-> 1084 writer.write(example, key)\r\n 1085 finally:\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in write(self, example, key, writer_batch_size)\r\n 445 \r\n--> 446 self.write_examples_on_file()\r\n 447 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in write_examples_on_file(self)\r\n 403 batch_examples[col] = [row[0][col] for row in self.current_examples]\r\n--> 404 self.write_batch(batch_examples=batch_examples)\r\n 405 self.current_examples = []\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in write_batch(self, batch_examples, writer_batch_size)\r\n 496 typed_sequence = OptimizedTypedSequence(batch_examples[col], type=col_type, try_type=col_try_type, col=col)\r\n--> 497 arrays.append(pa.array(typed_sequence))\r\n 498 inferred_features[col] = typed_sequence.get_inferred_type()\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/pyarrow\/array.pxi in pyarrow.lib.array()\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/pyarrow\/array.pxi in pyarrow.lib._handle_arrow_array_protocol()\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in __arrow_array__(self, type)\r\n 204 # We only do it if trying_type is False - since this is what the user asks for.\r\n--> 205 out = cast_array_to_feature(out, type, allow_number_to_str=not self.trying_type)\r\n 206 return out\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1064 if isinstance(feature, list):\r\n-> 1065 return pa.ListArray.from_arrays(array.offsets, _c(array.values, feature[0]))\r\n 1066 elif isinstance(feature, Sequence):\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in (.0)\r\n 1059 
if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in (.0)\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1086 return array_cast(array, feature(), allow_number_to_str=allow_number_to_str)\r\n-> 1087 raise TypeError(f\"Couldn't cast array of type\\n{array.type}\\nto\\n{feature}\")\r\n 1088 \r\n\r\nTypeError: Couldn't cast array of type\r\nstruct<\u533b\u9662-3.0T MRI: string, \u533b\u9662-CT: string, \u533b\u9662-DSA: string, \u533b\u9662-\u516c\u4ea4\u7ebf\u8def: string, \u533b\u9662-\u533a\u57df: string, \u533b\u9662-\u540d\u79f0: string, \u533b\u9662-\u5730\u5740: string, \u533b\u9662-\u5730\u94c1\u53ef\u8fbe: string, \u533b\u9662-\u5730\u94c1\u7ebf\u8def: string, \u533b\u9662-\u6027\u8d28: string, \u533b\u9662-\u6302\u53f7\u65f6\u95f4: string, \u533b\u9662-\u7535\u8bdd: string, \u533b\u9662-\u7b49\u7ea7: string, \u533b\u9662-\u7c7b\u522b: string, \u533b\u9662-\u91cd\u70b9\u79d1\u5ba4: string, \u533b\u9662-\u95e8\u8bca\u65f6\u95f4: string, \u5929\u6c14-\u57ce\u5e02: string, \u5929\u6c14-\u5929\u6c14: string, \u5929\u6c14-\u65e5\u671f: string, \u5929\u6c14-\u6e29\u5ea6: string, \u5929\u6c14-\u7d2b\u5916\u7ebf\u5f3a\u5ea6: string, \u5929\u6c14-\u98ce\u529b\u98ce\u5411: string, \u65c5\u6e38\u666f\u70b9-\u533a\u57df: string, \u65c5\u6e38\u666f\u70b9-\u540d\u79f0: string, \u65c5\u6e38\u666f\u70b9-\u5730\u5740: string, \u65c5\u6e38\u666f\u70b9-\u5f00\u653e\u65f6\u95f4: string, \u65c5\u6e38\u666f\u70b9-\u662f\u5426\u5730\u94c1\u76f4\u8fbe: string, \u65c5\u6e38\u666f\u70b9-\u666f\u70b9\u7c7b\u578b: string, \u65c5\u6e38\u666f\u70b9-\u6700\u9002\u5408\u4eba\u7fa4: string, \u65c5\u6e38\u666f\u70b9-\u6d88\u8d39: string, 
\u65c5\u6e38\u666f\u70b9-\u7279\u70b9: string, \u65c5\u6e38\u666f\u70b9-\u7535\u8bdd\u53f7\u7801: string, \u65c5\u6e38\u666f\u70b9-\u8bc4\u5206: string, \u65c5\u6e38\u666f\u70b9-\u95e8\u7968\u4ef7\u683c: string, \u6c7d\u8f66-\u4ef7\u683c(\u4e07\u5143): string, \u6c7d\u8f66-\u5012\u8f66\u5f71\u50cf: string, \u6c7d\u8f66-\u52a8\u529b\u6c34\u5e73: string, \u6c7d\u8f66-\u5382\u5546: string, \u6c7d\u8f66-\u53d1\u52a8\u673a\u6392\u91cf(L): string, \u6c7d\u8f66-\u53d1\u52a8\u673a\u9a6c\u529b(Ps): string, \u6c7d\u8f66-\u540d\u79f0: string, \u6c7d\u8f66-\u5b9a\u901f\u5de1\u822a: string, \u6c7d\u8f66-\u5de1\u822a\u7cfb\u7edf: string, \u6c7d\u8f66-\u5ea7\u4f4d\u6570: string, \u6c7d\u8f66-\u5ea7\u6905\u52a0\u70ed: string, \u6c7d\u8f66-\u5ea7\u6905\u901a\u98ce: string, \u6c7d\u8f66-\u6240\u5c5e\u4ef7\u683c\u533a\u95f4: string, \u6c7d\u8f66-\u6cb9\u8017\u6c34\u5e73: string, \u6c7d\u8f66-\u73af\u4fdd\u6807\u51c6: string, \u6c7d\u8f66-\u7ea7\u522b: string, \u6c7d\u8f66-\u7efc\u5408\u6cb9\u8017(L\/100km): string, \u6c7d\u8f66-\u80fd\u6e90\u7c7b\u578b: string, \u6c7d\u8f66-\u8f66\u578b: string, \u6c7d\u8f66-\u8f66\u7cfb: string, \u6c7d\u8f66-\u8f66\u8eab\u5c3a\u5bf8(mm): string, \u6c7d\u8f66-\u9a71\u52a8\u65b9\u5f0f: string, \u6c7d\u8f66-\u9a7e\u9a76\u8f85\u52a9\u5f71\u50cf: string, \u706b\u8f66-\u51fa\u53d1\u5730: string, \u706b\u8f66-\u51fa\u53d1\u65f6\u95f4: string, \u706b\u8f66-\u5230\u8fbe\u65f6\u95f4: string, \u706b\u8f66-\u5750\u5e2d: string, \u706b\u8f66-\u65e5\u671f: string, \u706b\u8f66-\u65f6\u957f: string, \u706b\u8f66-\u76ee\u7684\u5730: string, \u706b\u8f66-\u7968\u4ef7: string, \u706b\u8f66-\u8231\u4f4d\u6863\u6b21: string, \u706b\u8f66-\u8f66\u578b: string, \u706b\u8f66-\u8f66\u6b21\u4fe1\u606f: string, \u7535\u5f71-\u4e3b\u6f14: string, \u7535\u5f71-\u4e3b\u6f14\u540d\u5355: string, \u7535\u5f71-\u5177\u4f53\u4e0a\u6620\u65f6\u95f4: string, \u7535\u5f71-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a: string, \u7535\u5f71-\u5bfc\u6f14: string, \u7535\u5f71-\u5e74\u4ee3: string, \u7535\u5f71-\u7247\u540d: string, \u7535\u5f71-\u7247\u957f: string, \u7535\u5f71-\u7c7b\u578b: string, \u7535\u5f71-\u8c46\u74e3\u8bc4\u5206: string, \u7535\u8111-CPU: string, \u7535\u8111-CPU\u578b\u53f7: string, \u7535\u8111-\u4ea7\u54c1\u7c7b\u522b: string, \u7535\u8111-\u4ef7\u683c: string, \u7535\u8111-\u4ef7\u683c\u533a\u95f4: string, \u7535\u8111-\u5185\u5b58\u5bb9\u91cf: string, \u7535\u8111-\u5206\u7c7b: string, \u7535\u8111-\u54c1\u724c: string, \u7535\u8111-\u5546\u54c1\u540d\u79f0: string, \u7535\u8111-\u5c4f\u5e55\u5c3a\u5bf8: string, \u7535\u8111-\u5f85\u673a\u65f6\u957f: string, \u7535\u8111-\u663e\u5361\u578b\u53f7: string, \u7535\u8111-\u663e\u5361\u7c7b\u522b: string, \u7535\u8111-\u6e38\u620f\u6027\u80fd: string, \u7535\u8111-\u7279\u6027: string, \u7535\u8111-\u786c\u76d8\u5bb9\u91cf: string, \u7535\u8111-\u7cfb\u5217: string, \u7535\u8111-\u7cfb\u7edf: string, \u7535\u8111-\u8272\u7cfb: string, \u7535\u8111-\u88f8\u673a\u91cd\u91cf: string, \u7535\u89c6\u5267-\u4e3b\u6f14: string, \u7535\u89c6\u5267-\u4e3b\u6f14\u540d\u5355: string, \u7535\u89c6\u5267-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a: string, \u7535\u89c6\u5267-\u5355\u96c6\u7247\u957f: string, \u7535\u89c6\u5267-\u5bfc\u6f14: string, \u7535\u89c6\u5267-\u5e74\u4ee3: string, \u7535\u89c6\u5267-\u7247\u540d: string, \u7535\u89c6\u5267-\u7c7b\u578b: string, \u7535\u89c6\u5267-\u8c46\u74e3\u8bc4\u5206: string, \u7535\u89c6\u5267-\u96c6\u6570: string, \u7535\u89c6\u5267-\u9996\u64ad\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65b9\u5f0f: 
string, \u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4e0b\u8bfe\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4ef7\u683c: string, \u8f85\u5bfc\u73ed-\u533a\u57df: string, \u8f85\u5bfc\u73ed-\u5e74\u7ea7: string, \u8f85\u5bfc\u73ed-\u5f00\u59cb\u65e5\u671f: string, \u8f85\u5bfc\u73ed-\u6559\u5ba4\u5730\u70b9: string, \u8f85\u5bfc\u73ed-\u6559\u5e08: string, \u8f85\u5bfc\u73ed-\u6559\u5e08\u7f51\u5740: string, \u8f85\u5bfc\u73ed-\u65f6\u6bb5: string, \u8f85\u5bfc\u73ed-\u6821\u533a: string, \u8f85\u5bfc\u73ed-\u6bcf\u5468: string, \u8f85\u5bfc\u73ed-\u73ed\u53f7: string, \u8f85\u5bfc\u73ed-\u79d1\u76ee: string, \u8f85\u5bfc\u73ed-\u7ed3\u675f\u65e5\u671f: string, \u8f85\u5bfc\u73ed-\u8bfe\u65f6: string, \u8f85\u5bfc\u73ed-\u8bfe\u6b21: string, \u8f85\u5bfc\u73ed-\u8bfe\u7a0b\u7f51\u5740: string, \u8f85\u5bfc\u73ed-\u96be\u5ea6: string, \u901a\u7528-\u4ea7\u54c1\u7c7b\u522b: string, \u901a\u7528-\u4ef7\u683c\u533a\u95f4: string, \u901a\u7528-\u54c1\u724c: string, \u901a\u7528-\u7cfb\u5217: string, \u9152\u5e97-\u4ef7\u4f4d: string, \u9152\u5e97-\u505c\u8f66\u573a: string, \u9152\u5e97-\u533a\u57df: string, \u9152\u5e97-\u540d\u79f0: string, \u9152\u5e97-\u5730\u5740: string, \u9152\u5e97-\u623f\u578b: string, \u9152\u5e97-\u623f\u8d39: string, \u9152\u5e97-\u661f\u7ea7: string, \u9152\u5e97-\u7535\u8bdd\u53f7\u7801: string, \u9152\u5e97-\u8bc4\u5206: string, \u9152\u5e97-\u9152\u5e97\u7c7b\u578b: string, \u98de\u673a-\u51c6\u70b9\u7387: string, \u98de\u673a-\u51fa\u53d1\u5730: string, \u98de\u673a-\u5230\u8fbe\u65f6\u95f4: string, \u98de\u673a-\u65e5\u671f: string, \u98de\u673a-\u76ee\u7684\u5730: string, \u98de\u673a-\u7968\u4ef7: string, \u98de\u673a-\u822a\u73ed\u4fe1\u606f: string, \u98de\u673a-\u8231\u4f4d\u6863\u6b21: string, \u98de\u673a-\u8d77\u98de\u65f6\u95f4: string, \u9910\u5385-\u4eba\u5747\u6d88\u8d39: string, \u9910\u5385-\u4ef7\u4f4d: string, \u9910\u5385-\u533a\u57df: string, \u9910\u5385-\u540d\u79f0: string, \u9910\u5385-\u5730\u5740: string, \u9910\u5385-\u63a8\u8350\u83dc: string, \u9910\u5385-\u662f\u5426\u5730\u94c1\u76f4\u8fbe: string, \u9910\u5385-\u7535\u8bdd\u53f7\u7801: string, \u9910\u5385-\u83dc\u7cfb: string, \u9910\u5385-\u8425\u4e1a\u65f6\u95f4: string, \u9910\u5385-\u8bc4\u5206: string>\r\nto\r\n{'\u65c5\u6e38\u666f\u70b9-\u540d\u79f0': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u533a\u57df': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u666f\u70b9\u7c7b\u578b': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u6700\u9002\u5408\u4eba\u7fa4': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u6d88\u8d39': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u662f\u5426\u5730\u94c1\u76f4\u8fbe': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u95e8\u7968\u4ef7\u683c': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u5730\u5740': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u8bc4\u5206': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u5f00\u653e\u65f6\u95f4': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u7279\u70b9': Value(dtype='string', id=None), '\u9910\u5385-\u540d\u79f0': Value(dtype='string', id=None), '\u9910\u5385-\u533a\u57df': Value(dtype='string', id=None), '\u9910\u5385-\u83dc\u7cfb': Value(dtype='string', id=None), '\u9910\u5385-\u4ef7\u4f4d': Value(dtype='string', id=None), '\u9910\u5385-\u662f\u5426\u5730\u94c1\u76f4\u8fbe': 
Value(dtype='string', id=None), '\u9910\u5385-\u4eba\u5747\u6d88\u8d39': Value(dtype='string', id=None), '\u9910\u5385-\u5730\u5740': Value(dtype='string', id=None), '\u9910\u5385-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u9910\u5385-\u8bc4\u5206': Value(dtype='string', id=None), '\u9910\u5385-\u8425\u4e1a\u65f6\u95f4': Value(dtype='string', id=None), '\u9910\u5385-\u63a8\u8350\u83dc': Value(dtype='string', id=None), '\u9152\u5e97-\u540d\u79f0': Value(dtype='string', id=None), '\u9152\u5e97-\u533a\u57df': Value(dtype='string', id=None), '\u9152\u5e97-\u661f\u7ea7': Value(dtype='string', id=None), '\u9152\u5e97-\u4ef7\u4f4d': Value(dtype='string', id=None), '\u9152\u5e97-\u9152\u5e97\u7c7b\u578b': Value(dtype='string', id=None), '\u9152\u5e97-\u623f\u578b': Value(dtype='string', id=None), '\u9152\u5e97-\u505c\u8f66\u573a': Value(dtype='string', id=None), '\u9152\u5e97-\u623f\u8d39': Value(dtype='string', id=None), '\u9152\u5e97-\u5730\u5740': Value(dtype='string', id=None), '\u9152\u5e97-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u9152\u5e97-\u8bc4\u5206': Value(dtype='string', id=None), '\u7535\u8111-\u54c1\u724c': Value(dtype='string', id=None), '\u7535\u8111-\u4ea7\u54c1\u7c7b\u522b': Value(dtype='string', id=None), '\u7535\u8111-\u5206\u7c7b': Value(dtype='string', id=None), '\u7535\u8111-\u5185\u5b58\u5bb9\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u5c4f\u5e55\u5c3a\u5bf8': Value(dtype='string', id=None), '\u7535\u8111-CPU': Value(dtype='string', id=None), '\u7535\u8111-\u4ef7\u683c\u533a\u95f4': Value(dtype='string', id=None), '\u7535\u8111-\u7cfb\u5217': Value(dtype='string', id=None), '\u7535\u8111-\u5546\u54c1\u540d\u79f0': Value(dtype='string', id=None), '\u7535\u8111-\u7cfb\u7edf': Value(dtype='string', id=None), '\u7535\u8111-\u6e38\u620f\u6027\u80fd': Value(dtype='string', id=None), '\u7535\u8111-CPU\u578b\u53f7': Value(dtype='string', id=None), '\u7535\u8111-\u88f8\u673a\u91cd\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u663e\u5361\u7c7b\u522b': Value(dtype='string', id=None), '\u7535\u8111-\u663e\u5361\u578b\u53f7': Value(dtype='string', id=None), '\u7535\u8111-\u7279\u6027': Value(dtype='string', id=None), '\u7535\u8111-\u8272\u7cfb': Value(dtype='string', id=None), '\u7535\u8111-\u5f85\u673a\u65f6\u957f': Value(dtype='string', id=None), '\u7535\u8111-\u786c\u76d8\u5bb9\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u4ef7\u683c': Value(dtype='string', id=None), '\u706b\u8f66-\u51fa\u53d1\u5730': Value(dtype='string', id=None), '\u706b\u8f66-\u76ee\u7684\u5730': Value(dtype='string', id=None), '\u706b\u8f66-\u65e5\u671f': Value(dtype='string', id=None), '\u706b\u8f66-\u8f66\u578b': Value(dtype='string', id=None), '\u706b\u8f66-\u5750\u5e2d': Value(dtype='string', id=None), '\u706b\u8f66-\u8f66\u6b21\u4fe1\u606f': Value(dtype='string', id=None), '\u706b\u8f66-\u65f6\u957f': Value(dtype='string', id=None), '\u706b\u8f66-\u51fa\u53d1\u65f6\u95f4': Value(dtype='string', id=None), '\u706b\u8f66-\u5230\u8fbe\u65f6\u95f4': Value(dtype='string', id=None), '\u706b\u8f66-\u7968\u4ef7': Value(dtype='string', id=None), '\u98de\u673a-\u51fa\u53d1\u5730': Value(dtype='string', id=None), '\u98de\u673a-\u76ee\u7684\u5730': Value(dtype='string', id=None), '\u98de\u673a-\u65e5\u671f': Value(dtype='string', id=None), '\u98de\u673a-\u8231\u4f4d\u6863\u6b21': Value(dtype='string', id=None), '\u98de\u673a-\u822a\u73ed\u4fe1\u606f': Value(dtype='string', id=None), '\u98de\u673a-\u8d77\u98de\u65f6\u95f4': Value(dtype='string', 
id=None), '\u98de\u673a-\u5230\u8fbe\u65f6\u95f4': Value(dtype='string', id=None), '\u98de\u673a-\u7968\u4ef7': Value(dtype='string', id=None), '\u98de\u673a-\u51c6\u70b9\u7387': Value(dtype='string', id=None), '\u5929\u6c14-\u57ce\u5e02': Value(dtype='string', id=None), '\u5929\u6c14-\u65e5\u671f': Value(dtype='string', id=None), '\u5929\u6c14-\u5929\u6c14': Value(dtype='string', id=None), '\u5929\u6c14-\u6e29\u5ea6': Value(dtype='string', id=None), '\u5929\u6c14-\u98ce\u529b\u98ce\u5411': Value(dtype='string', id=None), '\u5929\u6c14-\u7d2b\u5916\u7ebf\u5f3a\u5ea6': Value(dtype='string', id=None), '\u7535\u5f71-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a': Value(dtype='string', id=None), '\u7535\u5f71-\u7c7b\u578b': Value(dtype='string', id=None), '\u7535\u5f71-\u5e74\u4ee3': Value(dtype='string', id=None), '\u7535\u5f71-\u4e3b\u6f14': Value(dtype='string', id=None), '\u7535\u5f71-\u5bfc\u6f14': Value(dtype='string', id=None), '\u7535\u5f71-\u7247\u540d': Value(dtype='string', id=None), '\u7535\u5f71-\u4e3b\u6f14\u540d\u5355': Value(dtype='string', id=None), '\u7535\u5f71-\u5177\u4f53\u4e0a\u6620\u65f6\u95f4': Value(dtype='string', id=None), '\u7535\u5f71-\u7247\u957f': Value(dtype='string', id=None), '\u7535\u5f71-\u8c46\u74e3\u8bc4\u5206': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u7c7b\u578b': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5e74\u4ee3': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u4e3b\u6f14': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5bfc\u6f14': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u7247\u540d': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u4e3b\u6f14\u540d\u5355': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u9996\u64ad\u65f6\u95f4': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u96c6\u6570': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5355\u96c6\u7247\u957f': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u8c46\u74e3\u8bc4\u5206': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u73ed\u53f7': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u96be\u5ea6': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u79d1\u76ee': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u5e74\u7ea7': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u533a\u57df': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6821\u533a': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65b9\u5f0f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u5f00\u59cb\u65e5\u671f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u7ed3\u675f\u65e5\u671f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6bcf\u5468': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65f6\u95f4': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0b\u8bfe\u65f6\u95f4': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u65f6\u6bb5': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u6b21': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u65f6': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5ba4\u5730\u70b9': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5e08': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4ef7\u683c': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u7a0b\u7f51\u5740': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5e08\u7f51\u5740': Value(dtype='string', id=None), 
'\u6c7d\u8f66-\u540d\u79f0': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u578b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u7ea7\u522b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u4f4d\u6570': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u8eab\u5c3a\u5bf8(mm)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5382\u5546': Value(dtype='string', id=None), '\u6c7d\u8f66-\u80fd\u6e90\u7c7b\u578b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u53d1\u52a8\u673a\u6392\u91cf(L)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u53d1\u52a8\u673a\u9a6c\u529b(Ps)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u9a71\u52a8\u65b9\u5f0f': Value(dtype='string', id=None), '\u6c7d\u8f66-\u7efc\u5408\u6cb9\u8017(L\/100km)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u73af\u4fdd\u6807\u51c6': Value(dtype='string', id=None), '\u6c7d\u8f66-\u9a7e\u9a76\u8f85\u52a9\u5f71\u50cf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5de1\u822a\u7cfb\u7edf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u4ef7\u683c(\u4e07\u5143)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u7cfb': Value(dtype='string', id=None), '\u6c7d\u8f66-\u52a8\u529b\u6c34\u5e73': Value(dtype='string', id=None), '\u6c7d\u8f66-\u6cb9\u8017\u6c34\u5e73': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5012\u8f66\u5f71\u50cf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5b9a\u901f\u5de1\u822a': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u6905\u52a0\u70ed': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u6905\u901a\u98ce': Value(dtype='string', id=None), '\u6c7d\u8f66-\u6240\u5c5e\u4ef7\u683c\u533a\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u540d\u79f0': Value(dtype='string', id=None), '\u533b\u9662-\u7b49\u7ea7': Value(dtype='string', id=None), '\u533b\u9662-\u7c7b\u522b': Value(dtype='string', id=None), '\u533b\u9662-\u6027\u8d28': Value(dtype='string', id=None), '\u533b\u9662-\u533a\u57df': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u5740': Value(dtype='string', id=None), '\u533b\u9662-\u7535\u8bdd': Value(dtype='string', id=None), '\u533b\u9662-\u6302\u53f7\u65f6\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u95e8\u8bca\u65f6\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u516c\u4ea4\u7ebf\u8def': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u94c1\u53ef\u8fbe': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u94c1\u7ebf\u8def': Value(dtype='string', id=None), '\u533b\u9662-\u91cd\u70b9\u79d1\u5ba4': Value(dtype='string', id=None), '\u533b\u9662-CT': Value(dtype='string', id=None), '\u533b\u9662-3.0T MRI': Value(dtype='string', id=None), '\u533b\u9662-DSA': Value(dtype='string', id=None)}\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTypeError Traceback (most recent call last)\r\n\/var\/folders\/28\/k4cy5q7s2hs92xq7_h89_vgm0000gn\/T\/ipykernel_44306\/2896005239.py in \r\n----> 1 dset = load_dataset(\"GEM\/RiSAWOZ\")\r\n 2 dset\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, revision, use_auth_token, task, streaming, script_version, **config_kwargs)\r\n 1692 \r\n 1693 # Download and prepare data\r\n-> 1694 builder_instance.download_and_prepare(\r\n 1695 download_config=download_config,\r\n 1696 
download_mode=download_mode,\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n 593 logger.warning(\"HF google storage unreachable. Downloading and preparing it from source\")\r\n 594 if not downloaded_from_gcs:\r\n--> 595 self._download_and_prepare(\r\n 596 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 597 )\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 682 try:\r\n 683 # Prepare split will record examples associated to the split\r\n--> 684 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n 685 except OSError as e:\r\n 686 raise OSError(\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/builder.py in _prepare_split(self, split_generator)\r\n 1084 writer.write(example, key)\r\n 1085 finally:\r\n-> 1086 num_examples, num_bytes = writer.finalize()\r\n 1087 \r\n 1088 split_generator.split_info.num_examples = num_examples\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in finalize(self, close_stream)\r\n 525 # Re-intializing to empty list for next batch\r\n 526 self.hkey_record = []\r\n--> 527 self.write_examples_on_file()\r\n 528 if self.pa_writer is None:\r\n 529 if self.schema:\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in write_examples_on_file(self)\r\n 402 # Since current_examples contains (example, key) tuples\r\n 403 batch_examples[col] = [row[0][col] for row in self.current_examples]\r\n--> 404 self.write_batch(batch_examples=batch_examples)\r\n 405 self.current_examples = []\r\n 406 \r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in write_batch(self, batch_examples, writer_batch_size)\r\n 495 col_try_type = try_features[col] if try_features is not None and col in try_features else None\r\n 496 typed_sequence = OptimizedTypedSequence(batch_examples[col], type=col_type, try_type=col_try_type, col=col)\r\n--> 497 arrays.append(pa.array(typed_sequence))\r\n 498 inferred_features[col] = typed_sequence.get_inferred_type()\r\n 499 schema = inferred_features.arrow_schema if self.pa_writer is None else self.schema\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/pyarrow\/array.pxi in pyarrow.lib.array()\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/pyarrow\/array.pxi in pyarrow.lib._handle_arrow_array_protocol()\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py in __arrow_array__(self, type)\r\n 203 # Also, when trying type \"string\", we don't want to convert integers or floats to \"string\".\r\n 204 # We only do it if trying_type is False - since this is what the user asks for.\r\n--> 205 out = cast_array_to_feature(out, type, allow_number_to_str=not self.trying_type)\r\n 206 return out\r\n 207 except (TypeError, pa.lib.ArrowInvalid) as e: # handle type errors and overflows\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 942 if pa.types.is_list(array.type) and config.PYARROW_VERSION < version.parse(\"4.0.0\"):\r\n 943 array = 
_sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n 946 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 918 return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n 922 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1063 # feature must be either [subfeature] or Sequence(subfeature)\r\n 1064 if isinstance(feature, list):\r\n-> 1065 return pa.ListArray.from_arrays(array.offsets, _c(array.values, feature[0]))\r\n 1066 elif isinstance(feature, Sequence):\r\n 1067 if feature.length > -1:\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 942 if pa.types.is_list(array.type) and config.PYARROW_VERSION < version.parse(\"4.0.0\"):\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n 946 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 918 return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n 922 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1058 }\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n 1062 elif pa.types.is_list(array.type):\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in (.0)\r\n 1058 }\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n 1062 elif pa.types.is_list(array.type):\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 942 if pa.types.is_list(array.type) and config.PYARROW_VERSION < version.parse(\"4.0.0\"):\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n 946 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 918 return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n 922 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1058 }\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n 1062 elif 
pa.types.is_list(array.type):\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in (.0)\r\n 1058 }\r\n 1059 if isinstance(feature, dict) and set(field.name for field in array.type) == set(feature):\r\n-> 1060 arrays = [_c(array.field(name), subfeature) for name, subfeature in feature.items()]\r\n 1061 return pa.StructArray.from_arrays(arrays, names=list(feature))\r\n 1062 elif pa.types.is_list(array.type):\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 942 if pa.types.is_list(array.type) and config.PYARROW_VERSION < version.parse(\"4.0.0\"):\r\n 943 array = _sanitize(array)\r\n--> 944 return func(array, *args, **kwargs)\r\n 945 \r\n 946 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in wrapper(array, *args, **kwargs)\r\n 918 return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])\r\n 919 else:\r\n--> 920 return func(array, *args, **kwargs)\r\n 921 \r\n 922 return wrapper\r\n\r\n~\/miniconda3\/envs\/huggingface\/lib\/python3.8\/site-packages\/datasets\/table.py in cast_array_to_feature(array, feature, allow_number_to_str)\r\n 1085 elif not isinstance(feature, (Sequence, dict, list, tuple)):\r\n 1086 return array_cast(array, feature(), allow_number_to_str=allow_number_to_str)\r\n-> 1087 raise TypeError(f\"Couldn't cast array of type\\n{array.type}\\nto\\n{feature}\")\r\n 1088 \r\n 1089 \r\n\r\nTypeError: Couldn't cast array of type\r\nstruct<\u533b\u9662-3.0T MRI: string, \u533b\u9662-CT: string, \u533b\u9662-DSA: string, \u533b\u9662-\u516c\u4ea4\u7ebf\u8def: string, \u533b\u9662-\u533a\u57df: string, \u533b\u9662-\u540d\u79f0: string, \u533b\u9662-\u5730\u5740: string, \u533b\u9662-\u5730\u94c1\u53ef\u8fbe: string, \u533b\u9662-\u5730\u94c1\u7ebf\u8def: string, \u533b\u9662-\u6027\u8d28: string, \u533b\u9662-\u6302\u53f7\u65f6\u95f4: string, \u533b\u9662-\u7535\u8bdd: string, \u533b\u9662-\u7b49\u7ea7: string, \u533b\u9662-\u7c7b\u522b: string, \u533b\u9662-\u91cd\u70b9\u79d1\u5ba4: string, \u533b\u9662-\u95e8\u8bca\u65f6\u95f4: string, \u5929\u6c14-\u57ce\u5e02: string, \u5929\u6c14-\u5929\u6c14: string, \u5929\u6c14-\u65e5\u671f: string, \u5929\u6c14-\u6e29\u5ea6: string, \u5929\u6c14-\u7d2b\u5916\u7ebf\u5f3a\u5ea6: string, \u5929\u6c14-\u98ce\u529b\u98ce\u5411: string, \u65c5\u6e38\u666f\u70b9-\u533a\u57df: string, \u65c5\u6e38\u666f\u70b9-\u540d\u79f0: string, \u65c5\u6e38\u666f\u70b9-\u5730\u5740: string, \u65c5\u6e38\u666f\u70b9-\u5f00\u653e\u65f6\u95f4: string, \u65c5\u6e38\u666f\u70b9-\u662f\u5426\u5730\u94c1\u76f4\u8fbe: string, \u65c5\u6e38\u666f\u70b9-\u666f\u70b9\u7c7b\u578b: string, \u65c5\u6e38\u666f\u70b9-\u6700\u9002\u5408\u4eba\u7fa4: string, \u65c5\u6e38\u666f\u70b9-\u6d88\u8d39: string, \u65c5\u6e38\u666f\u70b9-\u7279\u70b9: string, \u65c5\u6e38\u666f\u70b9-\u7535\u8bdd\u53f7\u7801: string, \u65c5\u6e38\u666f\u70b9-\u8bc4\u5206: string, \u65c5\u6e38\u666f\u70b9-\u95e8\u7968\u4ef7\u683c: string, \u6c7d\u8f66-\u4ef7\u683c(\u4e07\u5143): string, \u6c7d\u8f66-\u5012\u8f66\u5f71\u50cf: string, \u6c7d\u8f66-\u52a8\u529b\u6c34\u5e73: string, \u6c7d\u8f66-\u5382\u5546: string, \u6c7d\u8f66-\u53d1\u52a8\u673a\u6392\u91cf(L): string, \u6c7d\u8f66-\u53d1\u52a8\u673a\u9a6c\u529b(Ps): string, \u6c7d\u8f66-\u540d\u79f0: string, \u6c7d\u8f66-\u5b9a\u901f\u5de1\u822a: string, \u6c7d\u8f66-\u5de1\u822a\u7cfb\u7edf: string, \u6c7d\u8f66-\u5ea7\u4f4d\u6570: string, 
\u6c7d\u8f66-\u5ea7\u6905\u52a0\u70ed: string, \u6c7d\u8f66-\u5ea7\u6905\u901a\u98ce: string, \u6c7d\u8f66-\u6240\u5c5e\u4ef7\u683c\u533a\u95f4: string, \u6c7d\u8f66-\u6cb9\u8017\u6c34\u5e73: string, \u6c7d\u8f66-\u73af\u4fdd\u6807\u51c6: string, \u6c7d\u8f66-\u7ea7\u522b: string, \u6c7d\u8f66-\u7efc\u5408\u6cb9\u8017(L\/100km): string, \u6c7d\u8f66-\u80fd\u6e90\u7c7b\u578b: string, \u6c7d\u8f66-\u8f66\u578b: string, \u6c7d\u8f66-\u8f66\u7cfb: string, \u6c7d\u8f66-\u8f66\u8eab\u5c3a\u5bf8(mm): string, \u6c7d\u8f66-\u9a71\u52a8\u65b9\u5f0f: string, \u6c7d\u8f66-\u9a7e\u9a76\u8f85\u52a9\u5f71\u50cf: string, \u706b\u8f66-\u51fa\u53d1\u5730: string, \u706b\u8f66-\u51fa\u53d1\u65f6\u95f4: string, \u706b\u8f66-\u5230\u8fbe\u65f6\u95f4: string, \u706b\u8f66-\u5750\u5e2d: string, \u706b\u8f66-\u65e5\u671f: string, \u706b\u8f66-\u65f6\u957f: string, \u706b\u8f66-\u76ee\u7684\u5730: string, \u706b\u8f66-\u7968\u4ef7: string, \u706b\u8f66-\u8231\u4f4d\u6863\u6b21: string, \u706b\u8f66-\u8f66\u578b: string, \u706b\u8f66-\u8f66\u6b21\u4fe1\u606f: string, \u7535\u5f71-\u4e3b\u6f14: string, \u7535\u5f71-\u4e3b\u6f14\u540d\u5355: string, \u7535\u5f71-\u5177\u4f53\u4e0a\u6620\u65f6\u95f4: string, \u7535\u5f71-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a: string, \u7535\u5f71-\u5bfc\u6f14: string, \u7535\u5f71-\u5e74\u4ee3: string, \u7535\u5f71-\u7247\u540d: string, \u7535\u5f71-\u7247\u957f: string, \u7535\u5f71-\u7c7b\u578b: string, \u7535\u5f71-\u8c46\u74e3\u8bc4\u5206: string, \u7535\u8111-CPU: string, \u7535\u8111-CPU\u578b\u53f7: string, \u7535\u8111-\u4ea7\u54c1\u7c7b\u522b: string, \u7535\u8111-\u4ef7\u683c: string, \u7535\u8111-\u4ef7\u683c\u533a\u95f4: string, \u7535\u8111-\u5185\u5b58\u5bb9\u91cf: string, \u7535\u8111-\u5206\u7c7b: string, \u7535\u8111-\u54c1\u724c: string, \u7535\u8111-\u5546\u54c1\u540d\u79f0: string, \u7535\u8111-\u5c4f\u5e55\u5c3a\u5bf8: string, \u7535\u8111-\u5f85\u673a\u65f6\u957f: string, \u7535\u8111-\u663e\u5361\u578b\u53f7: string, \u7535\u8111-\u663e\u5361\u7c7b\u522b: string, \u7535\u8111-\u6e38\u620f\u6027\u80fd: string, \u7535\u8111-\u7279\u6027: string, \u7535\u8111-\u786c\u76d8\u5bb9\u91cf: string, \u7535\u8111-\u7cfb\u5217: string, \u7535\u8111-\u7cfb\u7edf: string, \u7535\u8111-\u8272\u7cfb: string, \u7535\u8111-\u88f8\u673a\u91cd\u91cf: string, \u7535\u89c6\u5267-\u4e3b\u6f14: string, \u7535\u89c6\u5267-\u4e3b\u6f14\u540d\u5355: string, \u7535\u89c6\u5267-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a: string, \u7535\u89c6\u5267-\u5355\u96c6\u7247\u957f: string, \u7535\u89c6\u5267-\u5bfc\u6f14: string, \u7535\u89c6\u5267-\u5e74\u4ee3: string, \u7535\u89c6\u5267-\u7247\u540d: string, \u7535\u89c6\u5267-\u7c7b\u578b: string, \u7535\u89c6\u5267-\u8c46\u74e3\u8bc4\u5206: string, \u7535\u89c6\u5267-\u96c6\u6570: string, \u7535\u89c6\u5267-\u9996\u64ad\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65b9\u5f0f: string, \u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4e0b\u8bfe\u65f6\u95f4: string, \u8f85\u5bfc\u73ed-\u4ef7\u683c: string, \u8f85\u5bfc\u73ed-\u533a\u57df: string, \u8f85\u5bfc\u73ed-\u5e74\u7ea7: string, \u8f85\u5bfc\u73ed-\u5f00\u59cb\u65e5\u671f: string, \u8f85\u5bfc\u73ed-\u6559\u5ba4\u5730\u70b9: string, \u8f85\u5bfc\u73ed-\u6559\u5e08: string, \u8f85\u5bfc\u73ed-\u6559\u5e08\u7f51\u5740: string, \u8f85\u5bfc\u73ed-\u65f6\u6bb5: string, \u8f85\u5bfc\u73ed-\u6821\u533a: string, \u8f85\u5bfc\u73ed-\u6bcf\u5468: string, \u8f85\u5bfc\u73ed-\u73ed\u53f7: string, \u8f85\u5bfc\u73ed-\u79d1\u76ee: string, 
\u8f85\u5bfc\u73ed-\u7ed3\u675f\u65e5\u671f: string, \u8f85\u5bfc\u73ed-\u8bfe\u65f6: string, \u8f85\u5bfc\u73ed-\u8bfe\u6b21: string, \u8f85\u5bfc\u73ed-\u8bfe\u7a0b\u7f51\u5740: string, \u8f85\u5bfc\u73ed-\u96be\u5ea6: string, \u901a\u7528-\u4ea7\u54c1\u7c7b\u522b: string, \u901a\u7528-\u4ef7\u683c\u533a\u95f4: string, \u901a\u7528-\u54c1\u724c: string, \u901a\u7528-\u7cfb\u5217: string, \u9152\u5e97-\u4ef7\u4f4d: string, \u9152\u5e97-\u505c\u8f66\u573a: string, \u9152\u5e97-\u533a\u57df: string, \u9152\u5e97-\u540d\u79f0: string, \u9152\u5e97-\u5730\u5740: string, \u9152\u5e97-\u623f\u578b: string, \u9152\u5e97-\u623f\u8d39: string, \u9152\u5e97-\u661f\u7ea7: string, \u9152\u5e97-\u7535\u8bdd\u53f7\u7801: string, \u9152\u5e97-\u8bc4\u5206: string, \u9152\u5e97-\u9152\u5e97\u7c7b\u578b: string, \u98de\u673a-\u51c6\u70b9\u7387: string, \u98de\u673a-\u51fa\u53d1\u5730: string, \u98de\u673a-\u5230\u8fbe\u65f6\u95f4: string, \u98de\u673a-\u65e5\u671f: string, \u98de\u673a-\u76ee\u7684\u5730: string, \u98de\u673a-\u7968\u4ef7: string, \u98de\u673a-\u822a\u73ed\u4fe1\u606f: string, \u98de\u673a-\u8231\u4f4d\u6863\u6b21: string, \u98de\u673a-\u8d77\u98de\u65f6\u95f4: string, \u9910\u5385-\u4eba\u5747\u6d88\u8d39: string, \u9910\u5385-\u4ef7\u4f4d: string, \u9910\u5385-\u533a\u57df: string, \u9910\u5385-\u540d\u79f0: string, \u9910\u5385-\u5730\u5740: string, \u9910\u5385-\u63a8\u8350\u83dc: string, \u9910\u5385-\u662f\u5426\u5730\u94c1\u76f4\u8fbe: string, \u9910\u5385-\u7535\u8bdd\u53f7\u7801: string, \u9910\u5385-\u83dc\u7cfb: string, \u9910\u5385-\u8425\u4e1a\u65f6\u95f4: string, \u9910\u5385-\u8bc4\u5206: string>\r\nto\r\n{'\u65c5\u6e38\u666f\u70b9-\u540d\u79f0': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u533a\u57df': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u666f\u70b9\u7c7b\u578b': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u6700\u9002\u5408\u4eba\u7fa4': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u6d88\u8d39': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u662f\u5426\u5730\u94c1\u76f4\u8fbe': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u95e8\u7968\u4ef7\u683c': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u5730\u5740': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u8bc4\u5206': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u5f00\u653e\u65f6\u95f4': Value(dtype='string', id=None), '\u65c5\u6e38\u666f\u70b9-\u7279\u70b9': Value(dtype='string', id=None), '\u9910\u5385-\u540d\u79f0': Value(dtype='string', id=None), '\u9910\u5385-\u533a\u57df': Value(dtype='string', id=None), '\u9910\u5385-\u83dc\u7cfb': Value(dtype='string', id=None), '\u9910\u5385-\u4ef7\u4f4d': Value(dtype='string', id=None), '\u9910\u5385-\u662f\u5426\u5730\u94c1\u76f4\u8fbe': Value(dtype='string', id=None), '\u9910\u5385-\u4eba\u5747\u6d88\u8d39': Value(dtype='string', id=None), '\u9910\u5385-\u5730\u5740': Value(dtype='string', id=None), '\u9910\u5385-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u9910\u5385-\u8bc4\u5206': Value(dtype='string', id=None), '\u9910\u5385-\u8425\u4e1a\u65f6\u95f4': Value(dtype='string', id=None), '\u9910\u5385-\u63a8\u8350\u83dc': Value(dtype='string', id=None), '\u9152\u5e97-\u540d\u79f0': Value(dtype='string', id=None), '\u9152\u5e97-\u533a\u57df': Value(dtype='string', id=None), '\u9152\u5e97-\u661f\u7ea7': Value(dtype='string', id=None), 
'\u9152\u5e97-\u4ef7\u4f4d': Value(dtype='string', id=None), '\u9152\u5e97-\u9152\u5e97\u7c7b\u578b': Value(dtype='string', id=None), '\u9152\u5e97-\u623f\u578b': Value(dtype='string', id=None), '\u9152\u5e97-\u505c\u8f66\u573a': Value(dtype='string', id=None), '\u9152\u5e97-\u623f\u8d39': Value(dtype='string', id=None), '\u9152\u5e97-\u5730\u5740': Value(dtype='string', id=None), '\u9152\u5e97-\u7535\u8bdd\u53f7\u7801': Value(dtype='string', id=None), '\u9152\u5e97-\u8bc4\u5206': Value(dtype='string', id=None), '\u7535\u8111-\u54c1\u724c': Value(dtype='string', id=None), '\u7535\u8111-\u4ea7\u54c1\u7c7b\u522b': Value(dtype='string', id=None), '\u7535\u8111-\u5206\u7c7b': Value(dtype='string', id=None), '\u7535\u8111-\u5185\u5b58\u5bb9\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u5c4f\u5e55\u5c3a\u5bf8': Value(dtype='string', id=None), '\u7535\u8111-CPU': Value(dtype='string', id=None), '\u7535\u8111-\u4ef7\u683c\u533a\u95f4': Value(dtype='string', id=None), '\u7535\u8111-\u7cfb\u5217': Value(dtype='string', id=None), '\u7535\u8111-\u5546\u54c1\u540d\u79f0': Value(dtype='string', id=None), '\u7535\u8111-\u7cfb\u7edf': Value(dtype='string', id=None), '\u7535\u8111-\u6e38\u620f\u6027\u80fd': Value(dtype='string', id=None), '\u7535\u8111-CPU\u578b\u53f7': Value(dtype='string', id=None), '\u7535\u8111-\u88f8\u673a\u91cd\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u663e\u5361\u7c7b\u522b': Value(dtype='string', id=None), '\u7535\u8111-\u663e\u5361\u578b\u53f7': Value(dtype='string', id=None), '\u7535\u8111-\u7279\u6027': Value(dtype='string', id=None), '\u7535\u8111-\u8272\u7cfb': Value(dtype='string', id=None), '\u7535\u8111-\u5f85\u673a\u65f6\u957f': Value(dtype='string', id=None), '\u7535\u8111-\u786c\u76d8\u5bb9\u91cf': Value(dtype='string', id=None), '\u7535\u8111-\u4ef7\u683c': Value(dtype='string', id=None), '\u706b\u8f66-\u51fa\u53d1\u5730': Value(dtype='string', id=None), '\u706b\u8f66-\u76ee\u7684\u5730': Value(dtype='string', id=None), '\u706b\u8f66-\u65e5\u671f': Value(dtype='string', id=None), '\u706b\u8f66-\u8f66\u578b': Value(dtype='string', id=None), '\u706b\u8f66-\u5750\u5e2d': Value(dtype='string', id=None), '\u706b\u8f66-\u8f66\u6b21\u4fe1\u606f': Value(dtype='string', id=None), '\u706b\u8f66-\u65f6\u957f': Value(dtype='string', id=None), '\u706b\u8f66-\u51fa\u53d1\u65f6\u95f4': Value(dtype='string', id=None), '\u706b\u8f66-\u5230\u8fbe\u65f6\u95f4': Value(dtype='string', id=None), '\u706b\u8f66-\u7968\u4ef7': Value(dtype='string', id=None), '\u98de\u673a-\u51fa\u53d1\u5730': Value(dtype='string', id=None), '\u98de\u673a-\u76ee\u7684\u5730': Value(dtype='string', id=None), '\u98de\u673a-\u65e5\u671f': Value(dtype='string', id=None), '\u98de\u673a-\u8231\u4f4d\u6863\u6b21': Value(dtype='string', id=None), '\u98de\u673a-\u822a\u73ed\u4fe1\u606f': Value(dtype='string', id=None), '\u98de\u673a-\u8d77\u98de\u65f6\u95f4': Value(dtype='string', id=None), '\u98de\u673a-\u5230\u8fbe\u65f6\u95f4': Value(dtype='string', id=None), '\u98de\u673a-\u7968\u4ef7': Value(dtype='string', id=None), '\u98de\u673a-\u51c6\u70b9\u7387': Value(dtype='string', id=None), '\u5929\u6c14-\u57ce\u5e02': Value(dtype='string', id=None), '\u5929\u6c14-\u65e5\u671f': Value(dtype='string', id=None), '\u5929\u6c14-\u5929\u6c14': Value(dtype='string', id=None), '\u5929\u6c14-\u6e29\u5ea6': Value(dtype='string', id=None), '\u5929\u6c14-\u98ce\u529b\u98ce\u5411': Value(dtype='string', id=None), '\u5929\u6c14-\u7d2b\u5916\u7ebf\u5f3a\u5ea6': Value(dtype='string', id=None), 
'\u7535\u5f71-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a': Value(dtype='string', id=None), '\u7535\u5f71-\u7c7b\u578b': Value(dtype='string', id=None), '\u7535\u5f71-\u5e74\u4ee3': Value(dtype='string', id=None), '\u7535\u5f71-\u4e3b\u6f14': Value(dtype='string', id=None), '\u7535\u5f71-\u5bfc\u6f14': Value(dtype='string', id=None), '\u7535\u5f71-\u7247\u540d': Value(dtype='string', id=None), '\u7535\u5f71-\u4e3b\u6f14\u540d\u5355': Value(dtype='string', id=None), '\u7535\u5f71-\u5177\u4f53\u4e0a\u6620\u65f6\u95f4': Value(dtype='string', id=None), '\u7535\u5f71-\u7247\u957f': Value(dtype='string', id=None), '\u7535\u5f71-\u8c46\u74e3\u8bc4\u5206': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5236\u7247\u56fd\u5bb6\/\u5730\u533a': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u7c7b\u578b': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5e74\u4ee3': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u4e3b\u6f14': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5bfc\u6f14': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u7247\u540d': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u4e3b\u6f14\u540d\u5355': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u9996\u64ad\u65f6\u95f4': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u96c6\u6570': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u5355\u96c6\u7247\u957f': Value(dtype='string', id=None), '\u7535\u89c6\u5267-\u8c46\u74e3\u8bc4\u5206': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u73ed\u53f7': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u96be\u5ea6': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u79d1\u76ee': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u5e74\u7ea7': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u533a\u57df': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6821\u533a': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65b9\u5f0f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u5f00\u59cb\u65e5\u671f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u7ed3\u675f\u65e5\u671f': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6bcf\u5468': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0a\u8bfe\u65f6\u95f4': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4e0b\u8bfe\u65f6\u95f4': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u65f6\u6bb5': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u6b21': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u65f6': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5ba4\u5730\u70b9': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5e08': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u4ef7\u683c': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u8bfe\u7a0b\u7f51\u5740': Value(dtype='string', id=None), '\u8f85\u5bfc\u73ed-\u6559\u5e08\u7f51\u5740': Value(dtype='string', id=None), '\u6c7d\u8f66-\u540d\u79f0': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u578b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u7ea7\u522b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u4f4d\u6570': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u8eab\u5c3a\u5bf8(mm)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5382\u5546': Value(dtype='string', id=None), '\u6c7d\u8f66-\u80fd\u6e90\u7c7b\u578b': Value(dtype='string', id=None), '\u6c7d\u8f66-\u53d1\u52a8\u673a\u6392\u91cf(L)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u53d1\u52a8\u673a\u9a6c\u529b(Ps)': Value(dtype='string', 
id=None), '\u6c7d\u8f66-\u9a71\u52a8\u65b9\u5f0f': Value(dtype='string', id=None), '\u6c7d\u8f66-\u7efc\u5408\u6cb9\u8017(L\/100km)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u73af\u4fdd\u6807\u51c6': Value(dtype='string', id=None), '\u6c7d\u8f66-\u9a7e\u9a76\u8f85\u52a9\u5f71\u50cf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5de1\u822a\u7cfb\u7edf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u4ef7\u683c(\u4e07\u5143)': Value(dtype='string', id=None), '\u6c7d\u8f66-\u8f66\u7cfb': Value(dtype='string', id=None), '\u6c7d\u8f66-\u52a8\u529b\u6c34\u5e73': Value(dtype='string', id=None), '\u6c7d\u8f66-\u6cb9\u8017\u6c34\u5e73': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5012\u8f66\u5f71\u50cf': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5b9a\u901f\u5de1\u822a': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u6905\u52a0\u70ed': Value(dtype='string', id=None), '\u6c7d\u8f66-\u5ea7\u6905\u901a\u98ce': Value(dtype='string', id=None), '\u6c7d\u8f66-\u6240\u5c5e\u4ef7\u683c\u533a\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u540d\u79f0': Value(dtype='string', id=None), '\u533b\u9662-\u7b49\u7ea7': Value(dtype='string', id=None), '\u533b\u9662-\u7c7b\u522b': Value(dtype='string', id=None), '\u533b\u9662-\u6027\u8d28': Value(dtype='string', id=None), '\u533b\u9662-\u533a\u57df': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u5740': Value(dtype='string', id=None), '\u533b\u9662-\u7535\u8bdd': Value(dtype='string', id=None), '\u533b\u9662-\u6302\u53f7\u65f6\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u95e8\u8bca\u65f6\u95f4': Value(dtype='string', id=None), '\u533b\u9662-\u516c\u4ea4\u7ebf\u8def': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u94c1\u53ef\u8fbe': Value(dtype='string', id=None), '\u533b\u9662-\u5730\u94c1\u7ebf\u8def': Value(dtype='string', id=None), '\u533b\u9662-\u91cd\u70b9\u79d1\u5ba4': Value(dtype='string', id=None), '\u533b\u9662-CT': Value(dtype='string', id=None), '\u533b\u9662-3.0T MRI': Value(dtype='string', id=None), '\u533b\u9662-DSA': Value(dtype='string', id=None)}\r\n```\r\n\r\n<\/details>\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.1\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3637\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3636","id":1115362702,"node_id":"PR_kwDODunzps4xohMB","number":3636,"title":"Update 
index.rst","user":{"login":"VioletteLepercq","id":95622912,"node_id":"U_kgDOBbMXAA","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/95622912?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VioletteLepercq","html_url":"https:\/\/github.com\/VioletteLepercq","followers_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/followers","following_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/orgs","repos_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/repos","events_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VioletteLepercq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-26T18:43:09Z","updated_at":"2022-01-26T18:44:55Z","closed_at":"2022-01-26T18:44:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3636","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3636","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3636.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3636.patch","merged_at":"2022-01-26T18:44:54Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3636\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3635","id":1115333219,"node_id":"PR_kwDODunzps4xobAe","number":3635,"title":"Make `ted_talks_iwslt` dataset 
streamable","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-26T18:07:56Z","updated_at":"2022-01-27T13:40:55Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3635","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3635","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3635.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3635.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3635\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3634","id":1115133279,"node_id":"I_kwDODunzps5Cd5Vf","number":3634,"title":"Dataset.shuffle(seed=None) gives fixed row 
permutation","user":{"login":"elisno","id":18127060,"node_id":"MDQ6VXNlcjE4MTI3MDYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18127060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/elisno","html_url":"https:\/\/github.com\/elisno","followers_url":"https:\/\/api.github.com\/users\/elisno\/followers","following_url":"https:\/\/api.github.com\/users\/elisno\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/elisno\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/elisno\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/elisno\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/elisno\/orgs","repos_url":"https:\/\/api.github.com\/users\/elisno\/repos","events_url":"https:\/\/api.github.com\/users\/elisno\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/elisno\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-01-26T15:13:08Z","updated_at":"2022-01-27T18:16:07Z","closed_at":"2022-01-27T18:16:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nRepeated attempts to `shuffle` a dataset without specifying a 
seed give the same results.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\n\r\n# Some toy example\r\ndata = datasets.Dataset.from_dict(\r\n {\"feature\": [1, 2, 3, 4, 5], \"label\": [\"a\", \"b\", \"c\", \"d\", \"e\"]}\r\n)\r\n\r\n# Doesn't work as expected\r\nprint(\"Shuffle dataset\")\r\nfor _ in range(3):\r\n print(data.shuffle(seed=None)[:])\r\n\r\n# This seems to work with pandas\r\nprint(\"\\nShuffle via pandas\")\r\nfor _ in range(3):\r\n df = data.to_pandas().sample(frac=1.0)\r\n print(datasets.Dataset.from_pandas(df, preserve_index=False)[:])\r\n\r\n```\r\n\r\n## Expected results\r\nI assumed that the default setting would initialize a new\/random state of a `np.random.BitGenerator` (see [docs](https:\/\/huggingface.co\/docs\/datasets\/package_reference\/main_classes.html?highlight=shuffle#datasets.Dataset.shuffle)).\r\n\r\nWouldn't that reshuffle the rows each time I call `data.shuffle()`?\r\n\r\n## Actual results\r\n\r\n```bash\r\nShuffle dataset\r\n{'feature': [5, 1, 3, 2, 4], 'label': ['e', 'a', 'c', 'b', 'd']}\r\n{'feature': [5, 1, 3, 2, 4], 'label': ['e', 'a', 'c', 'b', 'd']}\r\n{'feature': [5, 1, 3, 2, 4], 'label': ['e', 'a', 'c', 'b', 'd']}\r\n\r\nShuffle via pandas\r\n{'feature': [4, 2, 3, 1, 5], 'label': ['d', 'b', 'c', 'a', 'e']}\r\n{'feature': [2, 5, 3, 4, 1], 'label': ['b', 'e', 'c', 'd', 'a']}\r\n{'feature': [5, 2, 3, 1, 4], 'label': ['e', 'b', 'c', 'a', 'd']}\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.0\r\n- Platform: Linux-5.13.0-27-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.12\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3634\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3633","id":1115040174,"node_id":"PR_kwDODunzps4xng6E","number":3633,"title":"Mirror canonical datasets in 
prod","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-26T13:49:37Z","updated_at":"2022-01-26T13:56:21Z","closed_at":"2022-01-26T13:56:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3633","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3633","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3633.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3633.patch","merged_at":"2022-01-26T13:56:21Z"},"body":"Push the datasets changes to the Hub in production by setting `HF_USE_PROD=1`\r\n\r\nI also added a fix that makes the script ignore the json, csv, text, parquet and pandas dataset builders.\r\n\r\ncc @SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3633\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3632","id":1115027185,"node_id":"I_kwDODunzps5Cdfbx","number":3632,"title":"Adding CC-100: Monolingual Datasets from Web Crawl Data (Datasets links are 
invalid)","user":{"login":"AnzorGozalishvili","id":55232459,"node_id":"MDQ6VXNlcjU1MjMyNDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55232459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AnzorGozalishvili","html_url":"https:\/\/github.com\/AnzorGozalishvili","followers_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/followers","following_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/orgs","repos_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/repos","events_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-01-26T13:35:37Z","updated_
at":"2022-02-10T06:58:11Z","closed_at":"2022-02-10T06:58:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe dataset links are no longer valid for CC-100. It seems that the website which was keeping these files are no longer accessible and therefore this dataset became unusable. \r\nCheck out the dataset [homepage](http:\/\/data.statmt.org\/cc-100\/) which isn't accessible.\r\nAlso the URLs for dataset file per language isn't accessible: http:\/\/data.statmt.org\/cc-100\/.txt.xz (language codes: am, sr, ka, etc.)\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"cc100\", \"ka\")\r\n```\r\nIt throws 503 error.\r\n\r\n## Expected results\r\nIt should successfully download and load dataset but it throws an exception because the dataset files are no longer accessible.\r\n\r\n\r\n## Environment info\r\nRun from google colab. Just installed the library using pip:\r\n```!pip install -U datasets```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3632\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3631","id":1114833662,"node_id":"I_kwDODunzps5CcwL-","number":3631,"title":"Labels conflict when loading a local CSV file.","user":{"login":"pichljan","id":8571301,"node_id":"MDQ6VXNlcjg1NzEzMDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8571301?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pichljan","html_url":"https:\/\/github.com\/pichljan","followers_url":"https:\/\/api.github.com\/users\/pichljan\/followers","following_url":"https:\/\/api.github.com\/users\/pichljan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pichljan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pichljan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pichljan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pichljan\/orgs","repos_url":"https:\/\/api.github.com\/users\/pichljan\/repos","events_url":"https:\/\/api.github.com\/users\/pichljan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pichljan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-26T10:00:33Z","updated_at":"2022-02-11T23:02:31Z","closed_at":"2022-02-11T23:02:31Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to load a local CSV file with a separate file containing label names. It is successfully loaded for the first time, but when I try to load it again, there is a conflict between provided labels and the cached dataset info. 
Disabling caching globally and\/or using `download_mode=\"force_redownload\"` did not help.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset('csv', data_files='data\/my_data.csv',\r\n features=Features(text=Value(dtype='string'),\r\n label=ClassLabel(names_file='data\/my_data_labels.txt')))\r\n```\r\n`my_data.csv` file has the following structure:\r\n```\r\ntext,label\r\n\"example1\",0\r\n\"example2\",1\r\n...\r\n```\r\nand the `my_data_labels.txt` looks like this:\r\n```\r\nlabel1\r\nlabel2\r\n...\r\n```\r\n\r\n## Expected results\r\nSuccessfully loaded dataset.\r\n\r\n## Actual results\r\n```python\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1706, in load_dataset\r\n ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications, in_memory=keep_in_memory)\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 766, in as_dataset\r\n datasets = utils.map_nested(\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 261, in map_nested\r\n mapped = [\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 262, in \r\n _single_map_nested((function, obj, types, None, True))\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 197, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 797, in _build_single_dataset\r\n ds = self._as_dataset(\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 872, in _as_dataset\r\n return Dataset(fingerprint=fingerprint, **dataset_kwargs)\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 638, in __init__\r\n inferred_features = Features.from_arrow_schema(arrow_table.schema)\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1242, in from_arrow_schema\r\n return Features.from_dict(metadata[\"info\"][\"features\"])\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1271, in from_dict\r\n obj = generate_from_dict(dic)\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1076, in generate_from_dict\r\n return {key: generate_from_dict(value) for key, value in obj.items()}\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1076, in \r\n return {key: generate_from_dict(value) for key, value in obj.items()}\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1083, in generate_from_dict\r\n return class_type(**{k: v for k, v in obj.items() if k in field_names})\r\n File \"\", line 7, in __init__\r\n File \"\/usr\/local\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 776, in __post_init__\r\n raise ValueError(\"Please provide either names or names_file but not both.\")\r\nValueError: Please provide either names or names_file but not both.\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.18.0\r\n- Python version: 
3.8.2\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3631\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3630","id":1114578625,"node_id":"I_kwDODunzps5Cbx7B","number":3630,"title":"DuplicatedKeysError of NewsQA dataset","user":{"login":"StevenTang1998","id":37647985,"node_id":"MDQ6VXNlcjM3NjQ3OTg1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37647985?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/StevenTang1998","html_url":"https:\/\/github.com\/StevenTang1998","followers_url":"https:\/\/api.github.com\/users\/StevenTang1998\/followers","following_url":"https:\/\/api.github.com\/users\/StevenTang1998\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/StevenTang1998\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/StevenTang1998\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/StevenTang1998\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/StevenTang1998\/orgs","repos_url":"https:\/\/api.github.com\/users\/StevenTang1998\/repos","events_url":"https:\/\/api.github.com\/users\/StevenTang1998\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/StevenTang1998\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-26T03:05:49Z","updated_at":"2022-02-10T09:59:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"After processing the dataset following official [NewsQA](https:\/\/github.com\/Maluuba\/newsqa), I used datasets to load it:\r\n```\r\na = load_dataset('newsqa', data_dir='news')\r\n```\r\nand the following error occurred: \r\n```\r\nUsing custom data configuration default-data_dir=news\r\nDownloading and preparing dataset newsqa\/default to \/root\/.cache\/huggingface\/datasets\/newsqa\/default-data_dir=news\/1.0.0\/b0b23e22d94a3d352ad9d75aff2b71375264a122fae301463079ee8595e05ab9...\r\nTraceback (most recent call last): \r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/builder.py\", line 1084, in _prepare_split\r\n writer.write(example, key)\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/arrow_writer.py\", line 442, in write\r\n self.check_duplicate_keys()\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/arrow_writer.py\", line 453, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: .\/cnn\/stories\/6a0f9c8a5d0c6e8949b37924163c92923fe5770d.story\r\nKeys should be unique and deterministic in 
nature\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/load.py\", line 1694, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/builder.py\", line 595, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/builder.py\", line 684, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/builder.py\", line 1086, in _prepare_split\r\n num_examples, num_bytes = writer.finalize()\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/arrow_writer.py\", line 524, in finalize\r\n self.check_duplicate_keys()\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/arrow_writer.py\", line 453, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: .\/cnn\/stories\/6a0f9c8a5d0c6e8949b37924163c92923fe5770d.story\r\nKeys should be unique and deterministic in nature\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3630\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3629","id":1113971575,"node_id":"PR_kwDODunzps4xkCZA","number":3629,"title":"Fix Hub repos update when there's a new 
release","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-25T14:39:45Z","updated_at":"2022-01-25T14:55:46Z","closed_at":"2022-01-25T14:55:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3629","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3629","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3629.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3629.patch","merged_at":"2022-01-25T14:55:46Z"},"body":"It was not listing the full list of datasets correctly\r\n\r\ncc @SBrandeis this is why it failed for 1.18.0\r\n\r\nWe should be good now !","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3629\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3628","id":1113930644,"node_id":"I_kwDODunzps5CZTuU","number":3628,"title":"Dataset Card Creator drops information for \"Additional Information\" 
Section","user":{"login":"dennlinger","id":26013491,"node_id":"MDQ6VXNlcjI2MDEzNDkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26013491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dennlinger","html_url":"https:\/\/github.com\/dennlinger","followers_url":"https:\/\/api.github.com\/users\/dennlinger\/followers","following_url":"https:\/\/api.github.com\/users\/dennlinger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dennlinger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dennlinger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dennlinger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dennlinger\/orgs","repos_url":"https:\/\/api.github.com\/users\/dennlinger\/repos","events_url":"https:\/\/api.github.com\/users\/dennlinger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dennlinger\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-25T14:06:17Z","updated_at":"2022-01-25T14:09:01Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"First of all, the card creator is a great addition and really helpful for streamlining dataset cards!\r\n\r\n## Describe the bug\r\nI encountered an inconvenient bug when entering \"Additional Information\" in the react app, which drops already entered text when switching to a previous section, and then back again to \"Additional Information\". I was able to reproduce the issue in both Firefox and Chrome, so I suspect a problem with the React logic that doesn't expect users to switch back in the final section. \r\nEdit: I'm also not sure whether this is the right place to open the bug report on, since it's not clear to me which particular project it belongs to, or where I could find associated source code.\r\n\r\n## Steps to reproduce the bug\r\n\r\n1. Navigate to the Section \"Additional Information\" in the [dataset card creator](https:\/\/huggingface.co\/datasets\/card-creator\/)\r\n2. Enter text in an arbitrary field, e.g., \"Dataset Curators\".\r\n3. Switch back to a previous section, like \"Dataset Creation\".\r\n4. 
When switching back again to \"Additional Information\", the text has been deleted.\r\n\r\nNotably, this behavior can be reproduced again and again, it's not just problematic for the first \"switch-back\" from Additional Information.\r\n\r\n## Expected results\r\nFor step 4, the previously entered information should still be present in the boxes, similar to the behavior to all other sections (switching back there works as expected)\r\n\r\n## Actual results\r\nThe text boxes are empty again, and previously entered text got deleted.\r\n\r\n## Environment info\r\n- `datasets` version: N\/A\r\n- Platform: Firefox 96.0 \/ Chrome 97.0\r\n- Python version: N\/A\r\n- PyArrow version: N\/A\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3628\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3627","id":1113556837,"node_id":"PR_kwDODunzps4xitGe","number":3627,"title":"Fix host URL in The Pile datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-25T08:11:28Z","updated_at":"2022-02-12T12:59:17Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3627","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3627","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3627.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3627.patch","merged_at":null},"body":"This PR fixes the host URL in The Pile datasets, once they have mirrored their data in another server.\r\n\r\nFix 
#3626.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3627\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3626","id":1113534436,"node_id":"I_kwDODunzps5CXy_k","number":3626,"title":"The Pile cannot connect to host","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-25T07:43:33Z","updated_at":"2022-01-25T07:43:34Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe Pile had issues with their previous host server and have mirrored its content to another server.\r\n\r\nThe new URL server should be updated.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3626\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3625","id":1113017522,"node_id":"I_kwDODunzps5CV0yy","number":3625,"title":"Add a metadata field for when source data was produced 
","user":{"login":"davanstrien","id":8995957,"node_id":"MDQ6VXNlcjg5OTU5NTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8995957?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/davanstrien","html_url":"https:\/\/github.com\/davanstrien","followers_url":"https:\/\/api.github.com\/users\/davanstrien\/followers","following_url":"https:\/\/api.github.com\/users\/davanstrien\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/davanstrien\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/davanstrien\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/davanstrien\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/davanstrien\/orgs","repos_url":"https:\/\/api.github.com\/users\/davanstrien\/repos","events_url":"https:\/\/api.github.com\/users\/davanstrien\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/davanstrien\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-24T18:52:39Z","updated_at":"2022-01-27T18:13:06Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nThe current problem is that information about when source data was produced is not easily visible. Though there are a variety of metadata fields available in the dataset viewer, time period information is not included. This feature request suggests making metadata relating to the time that the underlying *source* data was produced more prominent and outlines why this specific information is of particular importance, both in domain-specific historic research and more broadly.\r\n\r\n**Describe the solution you'd like**\r\n\r\nThere are a variety of metadata fields exposed in the dataset viewer (license, task categories, etc.) These fields make this metadata more prominent both for human users and as potentially machine-actionable information (for example, through the API). I would propose to add a metadata field that says when some underlying data was produced. For example, a dataset would be labelled as being produced between `1800-1900`. \r\n\r\n**Describe alternatives you've considered**\r\nThis information is sometimes available in the Datacard or a paper describing the dataset. However, it's often not that easy to identify or extract this information, particularly if you want to use this field as a filter to identify relevant datasets. \r\n\r\n**Additional context**\r\n\r\nI believe this feature is relevant for a number of reasons: \r\n- Increasingly, there is an interest in using historical data for training language models (for example, https:\/\/huggingface.co\/dbmdz\/bert-base-historic-dutch-cased), and datasets to support this task (for example, https:\/\/huggingface.co\/datasets\/bnl_newspapers). For these datasets, indicating the time periods covered is particularly relevant. \r\n- More broadly, time is likely a common source of domain drift. Datasets of movie reviews from the 90s may not work well for recent movie reviews. 
As the documentation and long-term management of ML data become more of a priority, quickly understanding the time when the underlying text (or other data types) was produced is arguably more important. \r\n- time-series data: datasets are adding more support for time series data. Again, the periods covered might be particularly relevant here.\r\n\r\n**open questions**\r\n\r\n- I think some of my points above apply not only to the underlying data but also to annotations. As a result, there could also be an argument for encoding this information somewhere. However, I would argue (but could be persuaded otherwise) that this is probably less important for filtering. This type of context is already addressed in the datasheets template and often requires more narrative to discuss. \r\n- what level of granularity would make sense for this? e.g. assigning a decade, century or year?\r\n- how to encode this information? What formatting makes sense \r\n- what specific time to encode; a data range? (mean, modal, min, max value?) \r\n\r\nThis is a slightly amorphous feature request - I would be happy to discuss further\/try and propose a more concrete solution if this seems like something that could be worth considering. I realise this might also touch on other parts of the \ud83e\udd17 hubs ecosystem. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3625\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3623","id":1112835239,"node_id":"PR_kwDODunzps4xgWig","number":3623,"title":"Extend support for streaming datasets that use 
os.path.relpath","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-24T16:00:52Z","updated_at":"2022-02-04T14:03:55Z","closed_at":"2022-02-04T14:03:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3623","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3623","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3623.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3623.patch","merged_at":"2022-02-04T14:03:54Z"},"body":"This PR extends the support in streaming mode for datasets that use `os.path.relpath`, by patching that function.\r\n\r\nThis feature will also be useful to yield the relative path of audio or image files, within an archive or parent dir.\r\n\r\nClose #3622.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3623\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3622","id":1112831661,"node_id":"I_kwDODunzps5CVHat","number":3622,"title":"Extend support for streaming datasets that use 
os.path.relpath","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-24T15:58:23Z","updated_at":
"2022-02-04T14:03:54Z","closed_at":"2022-02-04T14:03:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Extend support for streaming datasets that use `os.path.relpath`.\r\n\r\nThis feature will also be useful to yield the relative path of audio or image files.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3622\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3621","id":1112720434,"node_id":"I_kwDODunzps5CUsQy","number":3621,"title":"Consider adding `ipywidgets` as a dependency.","user":{"login":"koaning","id":1019791,"node_id":"MDQ6VXNlcjEwMTk3OTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1019791?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/koaning","html_url":"https:\/\/github.com\/koaning","followers_url":"https:\/\/api.github.com\/users\/koaning\/followers","following_url":"https:\/\/api.github.com\/users\/koaning\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/koaning\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/koaning\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/koaning\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/koaning\/orgs","repos_url":"https:\/\/api.github.com\/users\/koaning\/repos","events_url":"https:\/\/api.github.com\/users\/koaning\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/koaning\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-24T14:27:11Z","updated_at":"2022-01-24T15:14:15Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I install `datasets` in a fresh virtualenv with jupyterlab I always see this error. \r\n\r\n```\r\nImportError: IProgress not found. Please update jupyter and ipywidgets. See https:\/\/ipywidgets.readthedocs.io\/en\/stable\/user_install.html\r\n```\r\n\r\nIt's a bit of a nuisance, because I need to run shut down the jupyterlab server in order to install the required dependency. Might it be an option to just include it as a dependency here? 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3621\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3620","id":1112677252,"node_id":"PR_kwDODunzps4xf1J3","number":3620,"title":"Add Fon language tag","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-24T13:52:26Z","updated_at":"2022-02-04T14:04:36Z","closed_at":"2022-02-04T14:04:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3620","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3620","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3620.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3620.patch","merged_at":"2022-02-04T14:04:35Z"},"body":"Add Fon language tag to resources.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3620\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3619","id":1112611415,"node_id":"PR_kwDODunzps4xfnCQ","number":3619,"title":"fix meta in mls","user":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-24T12:54:38Z","updated_at":"2022-01-24T20:53:22Z","closed_at":"2022-01-24T20:53:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3619","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3619","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3619.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3619.patch","merged_at":"2022-01-24T20:53:21Z"},"body":"`monolingual` value of `m ultilinguality` param in yaml meta was changed to `multilingual` :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3619\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3618","id":1112123365,"node_id":"I_kwDODunzps5CSafl","number":3618,"title":"TIMIT Dataset not working with 
GPU","user":{"login":"TheSeamau5","id":3227869,"node_id":"MDQ6VXNlcjMyMjc4Njk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3227869?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TheSeamau5","html_url":"https:\/\/github.com\/TheSeamau5","followers_url":"https:\/\/api.github.com\/users\/TheSeamau5\/followers","following_url":"https:\/\/api.github.com\/users\/TheSeamau5\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TheSeamau5\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TheSeamau5\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TheSeamau5\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TheSeamau5\/orgs","repos_url":"https:\/\/api.github.com\/users\/TheSeamau5\/repos","events_url":"https:\/\/api.github.com\/users\/TheSeamau5\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TheSeamau5\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2022-01-24T03:26:03Z","updated_at":"2022-01-27T13:17:51Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am working trying to use the TIMIT dataset in order to fine-tune Wav2Vec2 model and I am unable to load the \"audio\" column from the dataset when working with a GPU. \r\n\r\nI am working on Amazon Sagemaker Studio, on the Python 3 (PyTorch 1.8 Python 3.6 GPU Optimized) environment, with a single ml.g4dn.xlarge instance (corresponds to a Tesla T4 GPU). \r\n\r\nI don't know if the issue is GPU related or Python environment related because everything works when I work off of the CPU Optimized environment with a non-GPU instance. My code also works on Google Colab with a GPU instance. \r\n\r\nThis issue is blocking because I cannot get the 'audio' column in any way due to this error, which means that I can't pass it to any functions. I later use the dataset.map function and that is where I originally noticed this error. 
\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ntimit_train = load_dataset('timit_asr', split='train')\r\nprint(timit_train['audio'])\r\n```\r\n\r\n## Expected results\r\nExpected to see inside the 'audio' column, which contains an 'array' nested field with the array data I actually need.\r\n\r\n## Actual results\r\n\r\nTraceback\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n----> 1 timit_train['audio']\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py in __getitem__(self, key)\r\n 1917 \"\"\"Can be used to index columns (by string names) or rows (by integer index or iterable of indices or bools).\"\"\"\r\n 1918 return self._getitem(\r\n-> 1919 key,\r\n 1920 )\r\n 1921 \r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py in _getitem(self, key, decoded, **kwargs)\r\n 1902 pa_subtable = query_table(self._data, key, indices=self._indices if self._indices is not None else None)\r\n 1903 formatted_output = format_table(\r\n-> 1904 pa_subtable, key, formatter=formatter, format_columns=format_columns, output_all_columns=output_all_columns\r\n 1905 )\r\n 1906 return formatted_output\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/formatting\/formatting.py in format_table(table, key, formatter, format_columns, output_all_columns)\r\n 529 python_formatter = PythonFormatter(features=None)\r\n 530 if format_columns is None:\r\n--> 531 return formatter(pa_table, query_type=query_type)\r\n 532 elif query_type == \"column\":\r\n 533 if key in format_columns:\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/formatting\/formatting.py in __call__(self, pa_table, query_type)\r\n 280 return self.format_row(pa_table)\r\n 281 elif query_type == \"column\":\r\n--> 282 return self.format_column(pa_table)\r\n 283 elif query_type == \"batch\":\r\n 284 return self.format_batch(pa_table)\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/formatting\/formatting.py in format_column(self, pa_table)\r\n 315 column = self.python_arrow_extractor().extract_column(pa_table)\r\n 316 if self.decoded:\r\n--> 317 column = self.python_features_decoder.decode_column(column, pa_table.column_names[0])\r\n 318 return column\r\n 319 \r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/formatting\/formatting.py in decode_column(self, column, column_name)\r\n 221 \r\n 222 def decode_column(self, column: list, column_name: str) -> list:\r\n--> 223 return self.features.decode_column(column, column_name) if self.features else column\r\n 224 \r\n 225 def decode_batch(self, batch: dict) -> dict:\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/features\/features.py in decode_column(self, column, column_name)\r\n 1337 return (\r\n 1338 [self[column_name].decode_example(value) if value is not None else None for value in column]\r\n-> 1339 if self._column_requires_decoding[column_name]\r\n 1340 else column\r\n 1341 )\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/features\/features.py in (.0)\r\n 1336 \"\"\"\r\n 1337 return (\r\n-> 1338 [self[column_name].decode_example(value) if value is not None else None for value in column]\r\n 1339 if self._column_requires_decoding[column_name]\r\n 1340 else column\r\n\r\n\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/features\/audio.py in decode_example(self, value)\r\n 85 dict\r\n 86 \"\"\"\r\n---> 87 path, file = 
(value[\"path\"], BytesIO(value[\"bytes\"])) if value[\"bytes\"] is not None else (value[\"path\"], None)\r\n 88 if path is None and file is None:\r\n 89 raise ValueError(f\"An audio sample should have one of 'path' or 'bytes' but both are None in {value}.\")\r\n\r\nTypeError: string indices must be integers\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.0\r\n- Platform: Linux-4.14.256-197.484.amzn2.x86_64-x86_64-with-debian-buster-sid\r\n- Python version: 3.6.13\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3618\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3617","id":1111938691,"node_id":"PR_kwDODunzps4xdb8K","number":3617,"title":"PR for the CFPB Consumer Complaints dataset","user":{"login":"kayvane1","id":42403093,"node_id":"MDQ6VXNlcjQyNDAzMDkz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42403093?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kayvane1","html_url":"https:\/\/github.com\/kayvane1","followers_url":"https:\/\/api.github.com\/users\/kayvane1\/followers","following_url":"https:\/\/api.github.com\/users\/kayvane1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kayvane1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kayvane1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kayvane1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kayvane1\/orgs","repos_url":"https:\/\/api.github.com\/users\/kayvane1\/repos","events_url":"https:\/\/api.github.com\/users\/kayvane1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kayvane1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2022-01-23T17:47:12Z","updated_at":"2022-02-07T21:08:31Z","closed_at":"2022-02-07T21:08:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3617","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3617","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3617.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3617.patch","merged_at":"2022-02-07T21:08:31Z"},"body":"Think I followed all the steps but please let me know if anything needs changing or any improvements I can make to the code quality","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3617\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3616","id":1111587861,"node_id":"PR_kwDODunzps4xcZMD","number":3616,"title":"Make streamable the BnL Historical Newspapers dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-22T14:52:36Z","updated_at":"2022-02-04T14:05:23Z","closed_at":"2022-02-04T14:05:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3616","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3616","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3616.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3616.patch","merged_at":"2022-02-04T14:05:21Z"},"body":"I've refactored the code in order to make the dataset streamable and to avoid it takes too long:\r\n- I've used `iter_files`\r\n\r\nClose #3615 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3616\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3615","id":1111576876,"node_id":"I_kwDODunzps5CQVEs","number":3615,"title":"Dataset BnL Historical Newspapers does not work in streaming 
mode","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2022-01-22T14:12:59Z","updated_at":"2022-02-04T14:05:21Z","cl
osed_at":"2022-02-04T14:05:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen trying to load in streaming mode, it \"hangs\"...\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nds = load_dataset(\"bnl_newspapers\", split=\"train\", streaming=True)\r\n```\r\n\r\n## Expected results\r\nThe code should be optimized, so that it works fast in streaming mode.\r\n\r\nCC: @davanstrien \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3615\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3614","id":1110736657,"node_id":"PR_kwDODunzps4xZdCe","number":3614,"title":"Minor fixes","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-21T17:48:44Z","updated_at":"2022-01-24T12:45:49Z","closed_at":"2022-01-24T12:45:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3614","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3614","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3614.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3614.patch","merged_at":"2022-01-24T12:45:49Z"},"body":"This PR:\r\n* adds \"desc\" to the `ignore_kwargs` list in `Dataset.filter`\r\n* fixes the default value of `id` in `DatasetDict.prepare_for_task`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3614\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3613","id":1110684015,"node_id":"I_kwDODunzps5CM7Fv","number":3613,"title":"Files not updating in dataset viewer","user":{"login":"abidlabs","id":1778297,"node_id":"MDQ6VXNlcjE3NzgyOTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1778297?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abidlabs","html_url":"https:\/\/github.com\/abidlabs","followers_url":"https:\/\/api.github.com\/users\/abidlabs\/followers","following_url":"https:\/\/api.github.com\/users\/abidlabs\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abidlabs\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abidlabs\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abidlabs\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abidlabs\/orgs","repos_url":"https:\/\/api.github.com\/users\/abidlabs\/repos","events_url":"https:\/\/api.github.com\/users\/abidlabs\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abidlabs\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-21T16:47:20Z","updated_at":"2022-01-22T08:13:13Z","closed_at":"2022-01-22T08:13:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*name of the dataset*'\r\n\r\n**Link:**\r\nSome examples:\r\n* https:\/\/huggingface.co\/datasets\/abidlabs\/crowdsourced-speech4\r\n* https:\/\/huggingface.co\/datasets\/abidlabs\/test-audio-13\r\n\r\n*short description of the issue*\r\nIt seems that the dataset viewer is reading a cached version of the dataset and it is not updating to reflect new files that are added to the dataset. I get this error:\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/1778297\/150566660-30dc0dcd-18fd-4471-b70c-7c4bdc6a23c6.png)\r\n\r\n\r\nAm I the one who added this dataset? 
Yes","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3613\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3612","id":1110506466,"node_id":"PR_kwDODunzps4xYsvS","number":3612,"title":"wikifix","user":{"login":"apergo-ai","id":68908804,"node_id":"MDQ6VXNlcjY4OTA4ODA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68908804?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/apergo-ai","html_url":"https:\/\/github.com\/apergo-ai","followers_url":"https:\/\/api.github.com\/users\/apergo-ai\/followers","following_url":"https:\/\/api.github.com\/users\/apergo-ai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/apergo-ai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/apergo-ai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/apergo-ai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/apergo-ai\/orgs","repos_url":"https:\/\/api.github.com\/users\/apergo-ai\/repos","events_url":"https:\/\/api.github.com\/users\/apergo-ai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/apergo-ai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-01-21T14:05:11Z","updated_at":"2022-02-03T17:58:16Z","closed_at":"2022-02-03T17:58:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3612","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3612","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3612.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3612.patch","merged_at":null},"body":"This should get the wikipedia dataloading script back up and running - at least I hope so (tested with language ff and ii)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3612\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3611","id":1110399096,"node_id":"I_kwDODunzps5CL1h4","number":3611,"title":"Indexing bug after dataset.select()","user":{"login":"kamalkraj","id":17096858,"node_id":"MDQ6VXNlcjE3MDk2ODU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17096858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kamalkraj","html_url":"https:\/\/github.com\/kamalkraj","followers_url":"https:\/\/api.github.com\/users\/kamalkraj\/followers","following_url":"https:\/\/api.github.com\/users\/kamalkraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kamalkraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kamalkraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kamalkraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kamalkraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/kamalkraj\/repos","events_url":"https:\/\/api.github.com\/users\/kamalkraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kamalkraj\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-21T12:09:30Z","updated_at":"2022-01-27T18:16:22Z","closed_at":"2022-01-27T18:16:22Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n \r\nDataset indexing is not working as expected after `dataset.select(range(100))`\r\n\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nimport datasets\r\n\r\ntask_to_keys = {\r\n \"cola\": (\"sentence\", None),\r\n \"mnli\": (\"premise\", \"hypothesis\"),\r\n \"mrpc\": (\"sentence1\", \"sentence2\"),\r\n \"qnli\": (\"question\", \"sentence\"),\r\n \"qqp\": (\"question1\", \"question2\"),\r\n \"rte\": (\"sentence1\", \"sentence2\"),\r\n \"sst2\": (\"sentence\", None),\r\n \"stsb\": (\"sentence1\", \"sentence2\"),\r\n \"wnli\": (\"sentence1\", \"sentence2\"),\r\n}\r\n\r\ntask_name = \"sst2\"\r\nraw_datasets = datasets.load_dataset(\"glue\", task_name)\r\n\r\n\r\ntrain_dataset = raw_datasets[\"train\"]\r\n\r\nprint(\"before select: \",train_dataset[-2:])\r\n# before select: {'sentence': ['a patient viewer ', 'this new jangle of noise , mayhem and stupidity must be a serious contender for the title . 
'], 'label': [1, 0], 'idx': [67347, 67348]}\r\n\r\ntrain_dataset = train_dataset.select(range(100))\r\n\r\nprint(\"after select: \",train_dataset[-2:])\r\n# after select: {'sentence': [], 'label': [], 'idx': []}\r\n\r\n```\r\n\r\nlink to colab: https:\/\/colab.research.google.com\/drive\/1LngeRC9f0jE7eSQ4Kh1cIeb411lRXQD-?usp=sharing\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\nshowing 98, 99 index data\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\nempty\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.17.0\r\n- Platform: Linux-5.4.144+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3611\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3610","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3610\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3610\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3610\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3610","id":1109777314,"node_id":"I_kwDODunzps5CJdui","number":3610,"title":"Checksum error when trying to load amazon_review dataset","user":{"login":"rifoag","id":32415171,"node_id":"MDQ6VXNlcjMyNDE1MTcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32415171?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rifoag","html_url":"https:\/\/github.com\/rifoag","followers_url":"https:\/\/api.github.com\/users\/rifoag\/followers","following_url":"https:\/\/api.github.com\/users\/rifoag\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rifoag\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rifoag\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rifoag\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rifoag\/orgs","repos_url":"https:\/\/api.github.com\/users\/rifoag\/repos","events_url":"https:\/\/api.github.com\/users\/rifoag\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rifoag\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-20T21:20:32Z","updated_at":"2022-01-21T13:22:31Z","closed_at":"2022-01-21T13:22:31Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\nI am getting the issue when trying to load dataset using\r\n```\r\ndataset = load_dataset(\"amazon_polarity\")\r\n```\r\n\r\n## Expected results\r\ndataset loaded\r\n\r\n## Actual 
results\r\n```\r\n---------------------------------------------------------------------------\r\nNonMatchingChecksumError Traceback (most recent call last)\r\n in ()\r\n----> 1 dataset = load_dataset(\"amazon_polarity\")\r\n 2 dataset.set_format(type='pandas')\r\n 3 content_series = dataset['train']['content']\r\n 4 label_series = dataset['train']['label']\r\n 5 df = pd.concat([content_series, label_series], axis=1)\r\n\r\n3 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 38 if len(bad_urls) > 0:\r\n 39 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 40 raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 41 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 42 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/drive.google.com\/u\/0\/uc?id=0Bz8a_Dbh9QhbaW12WVVZS2drcnM&export=download']\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.17.0\r\n- Platform: Google colab\r\n- Python version: 3.7.12","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3610\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3610\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3609","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3609\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3609\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3609\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3609","id":1109579112,"node_id":"PR_kwDODunzps4xVrsG","number":3609,"title":"Fixes to pubmed dataset download 
function","user":{"login":"spacemanidol","id":3886120,"node_id":"MDQ6VXNlcjM4ODYxMjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3886120?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/spacemanidol","html_url":"https:\/\/github.com\/spacemanidol","followers_url":"https:\/\/api.github.com\/users\/spacemanidol\/followers","following_url":"https:\/\/api.github.com\/users\/spacemanidol\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/spacemanidol\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/spacemanidol\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/spacemanidol\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/spacemanidol\/orgs","repos_url":"https:\/\/api.github.com\/users\/spacemanidol\/repos","events_url":"https:\/\/api.github.com\/users\/spacemanidol\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/spacemanidol\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-20T17:31:35Z","updated_at":"2022-01-24T14:01:05Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3609","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3609","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3609.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3609.patch","merged_at":null},"body":"Pubmed has updated its settings for 2022 and thus existing download script does not work. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3609\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3609\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3608","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3608\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3608\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3608\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3608","id":1109310981,"node_id":"I_kwDODunzps5CHr4F","number":3608,"title":"Add support for continuous metrics (RMSE, 
MAE)","user":{"login":"ck37","id":50770,"node_id":"MDQ6VXNlcjUwNzcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50770?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ck37","html_url":"https:\/\/github.com\/ck37","followers_url":"https:\/\/api.github.com\/users\/ck37\/followers","following_url":"https:\/\/api.github.com\/users\/ck37\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ck37\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ck37\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ck37\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ck37\/orgs","repos_url":"https:\/\/api.github.com\/users\/ck37\/repos","events_url":"https:\/\/api.github.com\/users\/ck37\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ck37\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-20T13:35:36Z","updated_at":"2022-01-24T17:16:32Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nI am uploading our dataset and models for the \"Constructing interval measures\" method we've developed, which uses item response theory to convert multiple discrete labels into a continuous spectrum for hate speech. Once we have this outcome our NLP models conduct regression rather than classification, so binary metrics are not relevant. The only continuous metrics available at https:\/\/huggingface.co\/metrics are pearson & spearman correlation, which don't ensure that the prediction is on the same scale as the outcome.\r\n\r\n**Describe the solution you'd like**\r\nI would like to be able to tag our models on the Hub with the following metrics:\r\n- RMSE\r\n- MAE\r\n\r\n**Describe alternatives you've considered**\r\n\r\nI don't know if there are any alternatives.\r\n\r\n**Additional context**\r\nOur preprint is available here: https:\/\/arxiv.org\/abs\/2009.10277 . We are making it available for use in Jigsaw's Toxic Severity Rating Kaggle competition: https:\/\/www.kaggle.com\/c\/jigsaw-toxic-severity-rating\/overview . 
I have our first model uploaded to the Hub at https:\/\/huggingface.co\/ucberkeley-dlab\/hate-measure-roberta-large\r\n\r\nThanks,\r\nChris\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3608\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3608\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3607","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3607\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3607\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3607\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3607","id":1109218370,"node_id":"PR_kwDODunzps4xUgrR","number":3607,"title":"Add MIT Scene Parsing Benchmark","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-20T12:03:07Z","updated_at":"2022-01-20T12:03:07Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3607","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3607","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3607.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3607.patch","merged_at":null},"body":"Add MIT Scene Parsing Benchmark (a subset of ADE20k).\r\n\r\nTODOs:\r\n* [ ] add dummy data\r\n* [ ] add dataset card\r\n* [ ] generate `dataset_info.json`\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3607\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3607\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3606","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3606\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3606\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3606\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3606","id":1108918701,"node_id":"I_kwDODunzps5CGMGt","number":3606,"title":"audio column not saved correctly after resampling","user":{"login":"laphang","id":24724502,"node_id":"MDQ6VXNlcjI0NzI0NTAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24724502?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/laphang","html_url":"https:\/\/github.com\/laphang","followers_url":"https:\/\/api.github.com\/users\/laphang\/followers","following_url":"https:\/\/api.github.com\/users\/laphang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/laphang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/laphang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/laphang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/laphang\/orgs","repos_url":"https:\/\/api.github.com\/users\/laphang\/repos","events_url":"https:\/\/api.github.com\/users\/laphang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/laphang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2022-01-20T06:37:10Z","updated_at":"2022-01-23T01:41:01Z","closed_at":"2022-01-23T01:24:14Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAfter resampling the audio column, saving with save_to_disk doesn't seem to save with the correct type. \r\n\r\n## Steps to reproduce the bug\r\n- load a subset of common voice dataset (48Khz)\r\n- resample audio column to 16Khz\r\n- save with save_to_disk()\r\n- load with load_from_disk()\r\n\r\n## Expected results\r\nI expected that after saving the data, and then loading it back in, the audio column has the correct dataset.Audio type (i.e. 
same as before saving it)\r\n{'accent': Value(dtype='string', id=None),\r\n 'age': Value(dtype='string', id=None),\r\n 'audio': Audio(sampling_rate=16000, mono=True, _storage_dtype='string', id=None),\r\n 'client_id': Value(dtype='string', id=None),\r\n 'down_votes': Value(dtype='int64', id=None),\r\n 'gender': Value(dtype='string', id=None),\r\n 'locale': Value(dtype='string', id=None),\r\n 'path': Value(dtype='string', id=None),\r\n 'segment': Value(dtype='string', id=None),\r\n 'sentence': Value(dtype='string', id=None),\r\n 'up_votes': Value(dtype='int64', id=None)}\r\n\r\n## Actual results\r\nAudio column does not have the right type\r\n{'accent': Value(dtype='string', id=None),\r\n 'age': Value(dtype='string', id=None),\r\n 'audio': {'bytes': Value(dtype='binary', id=None),\r\n 'path': Value(dtype='string', id=None)},\r\n 'client_id': Value(dtype='string', id=None),\r\n 'down_votes': Value(dtype='int64', id=None),\r\n 'gender': Value(dtype='string', id=None),\r\n 'locale': Value(dtype='string', id=None),\r\n 'path': Value(dtype='string', id=None),\r\n 'segment': Value(dtype='string', id=None),\r\n 'sentence': Value(dtype='string', id=None),\r\n 'up_votes': Value(dtype='int64', id=None)}\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.17.0\r\n- Platform: linux\r\n- Python version:\r\n- PyArrow version:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3606\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3606\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3605","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3605\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3605\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3605\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3605","id":1108738561,"node_id":"PR_kwDODunzps4xS9rX","number":3605,"title":"Adding Turkic X-WMT evaluation set for machine 
translation","user":{"login":"mirzakhalov","id":26018417,"node_id":"MDQ6VXNlcjI2MDE4NDE3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26018417?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mirzakhalov","html_url":"https:\/\/github.com\/mirzakhalov","followers_url":"https:\/\/api.github.com\/users\/mirzakhalov\/followers","following_url":"https:\/\/api.github.com\/users\/mirzakhalov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mirzakhalov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mirzakhalov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mirzakhalov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mirzakhalov\/orgs","repos_url":"https:\/\/api.github.com\/users\/mirzakhalov\/repos","events_url":"https:\/\/api.github.com\/users\/mirzakhalov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mirzakhalov\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2022-01-20T01:40:29Z","updated_at":"2022-01-31T09:50:57Z","closed_at":"2022-01-31T09:50:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3605","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3605","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3605.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3605.patch","merged_at":"2022-01-31T09:50:57Z"},"body":"This dataset is a human-translated evaluation set for MT crowdsourced and provided by the [Turkic Interlingua ](turkic-interlingua.org) community. It contains eval sets for 8 Turkic languages covering 88 language directions. 
Languages being covered are:\r\n\r\nAzerbaijani (az)\r\nBashkir (ba)\r\nEnglish (en)\r\nKarakalpak (kaa)\r\nKazakh (kk)\r\nKirghiz (ky)\r\nRussian (ru)\r\nTurkish (tr)\r\nSakha (sah)\r\nUzbek (uz)\r\n\r\nMore info about the corpus is here: [https:\/\/github.com\/turkic-interlingua\/til-mt\/tree\/master\/xwmt](https:\/\/github.com\/turkic-interlingua\/til-mt\/tree\/master\/xwmt)\r\n\r\nA paper describing the test set is here: [https:\/\/arxiv.org\/abs\/2109.04593](https:\/\/arxiv.org\/abs\/2109.04593)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3605\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3605\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3604","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3604\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3604\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3604\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3604","id":1108477316,"node_id":"I_kwDODunzps5CEgWE","number":3604,"title":"Dataset Viewer not showing Previews for Private Datasets","user":{"login":"abidlabs","id":1778297,"node_id":"MDQ6VXNlcjE3NzgyOTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1778297?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abidlabs","html_url":"https:\/\/github.com\/abidlabs","followers_url":"https:\/\/api.github.com\/users\/abidlabs\/followers","following_url":"https:\/\/api.github.com\/users\/abidlabs\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abidlabs\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abidlabs\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abidlabs\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abidlabs\/orgs","repos_url":"https:\/\/api.github.com\/users\/abidlabs\/repos","events_url":"https:\/\/api.github.com\/users\/abidlabs\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abidlabs\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-19T19:29:26Z","updated_at":"2022-01-20T08:11:28Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for 'abidlabs\/test-audio-13'\r\n\r\nIt seems that the dataset viewer does not show previews for `private` datasets, even for the user who's private dataset it is. See [1] for example. 
If I change the visibility to public, then it does show, but it would be useful to have the viewer even for private datasets.\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/1778297\/150200515-93ff1545-11fd-4793-be64-6bed3cd895e2.png)\r\n\r\n**Link:** \r\n\r\n[1] https:\/\/huggingface.co\/datasets\/abidlabs\/test-audio-13\r\n\r\n**Am I the one who added this dataset?** \r\n\r\nYes\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3604\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3604\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3603","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3603\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3603\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3603\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3603","id":1108392141,"node_id":"PR_kwDODunzps4xR1ih","number":3603,"title":"Add British Library books dataset","user":{"login":"davanstrien","id":8995957,"node_id":"MDQ6VXNlcjg5OTU5NTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8995957?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/davanstrien","html_url":"https:\/\/github.com\/davanstrien","followers_url":"https:\/\/api.github.com\/users\/davanstrien\/followers","following_url":"https:\/\/api.github.com\/users\/davanstrien\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/davanstrien\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/davanstrien\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/davanstrien\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/davanstrien\/orgs","repos_url":"https:\/\/api.github.com\/users\/davanstrien\/repos","events_url":"https:\/\/api.github.com\/users\/davanstrien\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/davanstrien\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-01-19T17:53:05Z","updated_at":"2022-01-31T17:22:51Z","closed_at":"2022-01-31T17:01:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3603","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3603","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3603.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3603.patch","merged_at":"2022-01-31T17:01:49Z"},"body":"This pull request adds a dataset of text from digitised (primarily 19th Century) books from the British Library. This collection has previously been used for training language models, e.g. https:\/\/github.com\/dbmdz\/clef-hipe\/blob\/main\/hlms.md. It would be nice to make this dataset more accessible for others to use through datasets. 
\r\n\r\nThis is still a WIP but I wanted to get some initial feedback in particular; I wanted to check:\r\n- I am handling the use of `iter_archive` correctly - I intend to ensure that `dl_manager.download` gets the complete list of URLs to download upfront, so the progress bar knows how much is left to download and then to pass through the `gen_kwargs` a list of downloaded zip archives wrapped in `iter_archive`. I am unsure if there is a more elegant approach for this?\r\n- the number of configs: I have aimed to keep this limited - there are a lot of URLs covering the entire dataset, but I have tried to base the configs on what I believe the majority of people will want to they are not presented with too many options - I am happy to hear suggestions for changing this\r\n\r\nIf there are other glaring omissions or mistakes, I'd be happy to hear them. If this approach seems sensible in general, I will finish all the remaining TODOs, generate dummy_data, etc. \r\n ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3603\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3603\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3602","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3602\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3602\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3602\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3602","id":1108247870,"node_id":"PR_kwDODunzps4xRXVm","number":3602,"title":"Update url for conll2003","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-19T15:35:04Z","updated_at":"2022-01-20T16:23:03Z","closed_at":"2022-01-19T15:43:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3602","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3602","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3602.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3602.patch","merged_at":"2022-01-19T15:43:53Z"},"body":"Following 
https:\/\/github.com\/huggingface\/datasets\/issues\/3582 I'm changing the download URL of the conll2003 data files, since the previous host doesn't have the authorization to redistribute the data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3602\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3602\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3601","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3601\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3601\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3601\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3601","id":1108207131,"node_id":"PR_kwDODunzps4xROtF","number":3601,"title":"Add conll2003 licensing","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-19T15:00:41Z","updated_at":"2022-01-19T17:17:28Z","closed_at":"2022-01-19T17:17:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3601","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3601","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3601.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3601.patch","merged_at":"2022-01-19T17:17:28Z"},"body":"Following https:\/\/github.com\/huggingface\/datasets\/issues\/3582, this PR updates the licensing section of the CoNLL2003 dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3601\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3601\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3600","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3600\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3600\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3600\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3600","id":1108131878,"node_id":"PR_kwDODunzps4xQ-vt","number":3600,"title":"Use old url for conll2003","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-19T13:56:49Z","updated_at":"2022-01-19T14:16:28Z","closed_at":"2022-01-19T14:16:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3600","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3600","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3600.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3600.patch","merged_at":"2022-01-19T14:16:28Z"},"body":"As reported in https:\/\/github.com\/huggingface\/datasets\/issues\/3582 the CoNLL2003 data files are not available in the master branch of the repo that used to host them. 
\r\n\r\nFor now we can use the URL from an older commit to access the data files","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3600\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3600\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3599","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3599\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3599\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3599\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3599","id":1108111607,"node_id":"I_kwDODunzps5CDHD3","number":3599,"title":"The `add_column()` method does not work if used on dataset sliced with `select()`","user":{"login":"ThGouzias","id":59422506,"node_id":"MDQ6VXNlcjU5NDIyNTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59422506?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ThGouzias","html_url":"https:\/\/github.com\/ThGouzias","followers_url":"https:\/\/api.github.com\/users\/ThGouzias\/followers","following_url":"https:\/\/api.github.com\/users\/ThGouzias\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ThGouzias\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ThGouzias\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ThGouzias\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ThGouzias\/orgs","repos_url":"https:\/\/api.github.com\/users\/ThGouzias\/repos","events_url":"https:\/\/api.github.com\/users\/ThGouzias\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ThGouzias\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-19T13:36:50Z","updated_at":"2022-01-28T15:35:57Z","closed_at":"2022-01-28T15:35:57Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello, I posted this as a question on the forums ([here](https:\/\/discuss.huggingface.co\/t\/add-column-does-not-work-if-used-on-dataset-sliced-with-select\/13893)): \r\n\r\nI have a dataset with 2000 entries\r\n\r\n> dataset = Dataset.from_dict({'colA': list(range(2000))})\r\n\r\nand from which I want to extract the first one thousand rows, create a new dataset with these and also add a new column to it:\r\n\r\n> dataset2 = dataset.select(list(range(1000)))\r\n> final_dataset = dataset2.add_column('colB', list(range(1000)))\r\n\r\nThis gives an error\r\n>ArrowInvalid: Added column's length must match table's length. 
Expected length 2000 but got length 1000\r\n\r\nSo it looks like even though it is a dataset with 1000 rows, it \"remembers\" the shape of the one it was sliced from.\r\n\r\n## Actual results\r\n```\r\nArrowInvalid Traceback (most recent call last)\r\n in \r\n----> 1 final_dataset = dataset2.add_column('colB', list(range(1000)))\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 468 }\r\n 469 # apply actual function\r\n--> 470 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 471 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 472 # re-apply format to the output\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 404 # Call actual function\r\n 405 \r\n--> 406 out = func(self, *args, **kwargs)\r\n 407 \r\n 408 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py in add_column(self, name, column, new_fingerprint)\r\n 3343 column_table = InMemoryTable.from_pydict({name: column})\r\n 3344 # Concatenate tables horizontally\r\n-> 3345 table = ConcatenationTable.from_tables([self._data, column_table], axis=1)\r\n 3346 # Update features\r\n 3347 info = self.info.copy()\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/table.py in from_tables(cls, tables, axis)\r\n 729 table_blocks = to_blocks(table)\r\n 730 blocks = _extend_blocks(blocks, table_blocks, axis=axis)\r\n--> 731 return cls.from_blocks(blocks)\r\n 732 \r\n 733 @property\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/table.py in from_blocks(cls, blocks)\r\n 668 @classmethod\r\n 669 def from_blocks(cls, blocks: TableBlockContainer) -> \"ConcatenationTable\":\r\n--> 670 blocks = cls._consolidate_blocks(blocks)\r\n 671 if isinstance(blocks, TableBlock):\r\n 672 table = blocks\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/table.py in _consolidate_blocks(cls, blocks)\r\n 664 return cls._merge_blocks(blocks, axis=0)\r\n 665 else:\r\n--> 666 return cls._merge_blocks(blocks)\r\n 667 \r\n 668 @classmethod\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/table.py in _merge_blocks(cls, blocks, axis)\r\n 650 merged_blocks += list(block_group)\r\n 651 else: # both\r\n--> 652 merged_blocks = [cls._merge_blocks(row_block, axis=1) for row_block in blocks]\r\n 653 if all(len(row_block) == 1 for row_block in merged_blocks):\r\n 654 merged_blocks = cls._merge_blocks(\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/table.py in (.0)\r\n 650 merged_blocks += list(block_group)\r\n 651 else: # both\r\n--> 652 merged_blocks = [cls._merge_blocks(row_block, axis=1) for row_block in blocks]\r\n 653 if all(len(row_block) == 1 for row_block in merged_blocks):\r\n 654 merged_blocks = cls._merge_blocks(\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/table.py in _merge_blocks(cls, blocks, axis)\r\n 647 for is_in_memory, block_group in groupby(blocks, key=lambda x: isinstance(x, InMemoryTable)):\r\n 648 if is_in_memory:\r\n--> 649 block_group = [InMemoryTable(cls._concat_blocks(list(block_group), axis=axis))]\r\n 650 merged_blocks += list(block_group)\r\n 651 else: # both\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/table.py in _concat_blocks(blocks, axis)\r\n 626 else:\r\n 627 for name, col in zip(table.column_names, table.columns):\r\n--> 628 pa_table = pa_table.append_column(name, col)\r\n 629 return 
pa_table\r\n 630 else:\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.append_column()\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.add_column()\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowInvalid: Added column's length must match table's length. Expected length 2000 but got length 1000\r\n```\r\n\r\nA solution provided by @mariosasko is to use `dataset2.flatten_indices()` after the `select()` and before attempting to add the new column:\r\n\r\n> dataset = Dataset.from_dict({'colA': list(range(2000))})\r\n> dataset2 = dataset.select(list(range(1000)))\r\n> dataset2 = dataset2.flatten_indices()\r\n> final_dataset = dataset2.add_column('colB', list(range(1000)))\r\n\r\nwhich works.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.13.2 (note: also checked with version 1.17.0, still the same error)\r\n- Platform: Ubuntu 20.04.3\r\n- Python version: 3.8.10\r\n- PyArrow version: 6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3599\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3599\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3598","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3598\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3598\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3598\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3598","id":1108107199,"node_id":"I_kwDODunzps5CDF-_","number":3598,"title":"Readme info not being parsed to show on Dataset card page","user":{"login":"davidcanovas","id":79796807,"node_id":"MDQ6VXNlcjc5Nzk2ODA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79796807?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/davidcanovas","html_url":"https:\/\/github.com\/davidcanovas","followers_url":"https:\/\/api.github.com\/users\/davidcanovas\/followers","following_url":"https:\/\/api.github.com\/users\/davidcanovas\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/davidcanovas\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/davidcanovas\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/davidcanovas\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/davidcanovas\/orgs","repos_url":"https:\/\/api.github.com\/users\/davidcanovas\/repos","events_url":"https:\/\/api.github.com\/users\/davidcanovas\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/davidcanovas\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-01-19T13:32:29Z","updated_at":"2022-01-21T10:20:01Z","closed_at":"2022-01-21T10:20:01Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe info contained in the README.md file is not being shown in the dataset main page. Basic info and table of contents are properly formatted in the README.\r\n\r\n## Steps to reproduce the bug\r\n\r\n# Sample code to reproduce the bug\r\nThe README file is this one: https:\/\/huggingface.co\/datasets\/softcatala\/Tilde-MODEL-Catalan\/blob\/main\/README.md\r\n\r\n## Expected results\r\nREADME info should appear in the Dataset card page.\r\n\r\n## Actual results\r\nNothing is shown. However, labels are parsed and shown successfully.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3598\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3598\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3597","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3597\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3597\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3597\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3597","id":1108092864,"node_id":"I_kwDODunzps5CDCfA","number":3597,"title":"ERROR: File \"setup.py\" or \"setup.cfg\" not found. Directory cannot be installed in editable mode: \/content","user":{"login":"amitkml","id":49492030,"node_id":"MDQ6VXNlcjQ5NDkyMDMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49492030?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/amitkml","html_url":"https:\/\/github.com\/amitkml","followers_url":"https:\/\/api.github.com\/users\/amitkml\/followers","following_url":"https:\/\/api.github.com\/users\/amitkml\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/amitkml\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/amitkml\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/amitkml\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/amitkml\/orgs","repos_url":"https:\/\/api.github.com\/users\/amitkml\/repos","events_url":"https:\/\/api.github.com\/users\/amitkml\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/amitkml\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-19T13:19:28Z","updated_at":"2022-01-20T13:26:50Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Bug\r\nThe install of streaming dataset is giving following error.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n! git clone https:\/\/github.com\/huggingface\/datasets.git\r\n! 
cd datasets\r\n! pip install -e \".[streaming]\"\r\n```\r\n\r\n## Actual results\r\nCloning into 'datasets'...\r\nremote: Enumerating objects: 50816, done.\r\nremote: Counting objects: 100% (2356\/2356), done.\r\nremote: Compressing objects: 100% (1606\/1606), done.\r\nremote: Total 50816 (delta 834), reused 1741 (delta 525), pack-reused 48460\r\nReceiving objects: 100% (50816\/50816), 72.47 MiB | 27.68 MiB\/s, done.\r\nResolving deltas: 100% (22541\/22541), done.\r\nChecking out files: 100% (6722\/6722), done.\r\nERROR: File \"setup.py\" or \"setup.cfg\" not found. Directory cannot be installed in editable mode: \/content\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3597\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3597\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3596","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3596\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3596\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3596\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3596","id":1107345338,"node_id":"I_kwDODunzps5CAL-6","number":3596,"title":"Loss of cast `Image` feature on certain dataset method","user":{"login":"davanstrien","id":8995957,"node_id":"MDQ6VXNlcjg5OTU5NTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8995957?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/davanstrien","html_url":"https:\/\/github.com\/davanstrien","followers_url":"https:\/\/api.github.com\/users\/davanstrien\/followers","following_url":"https:\/\/api.github.com\/users\/davanstrien\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/davanstrien\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/davanstrien\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/davanstrien\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/davanstrien\/orgs","repos_url":"https:\/\/api.github.com\/users\/davanstrien\/repos","events_url":"https:\/\/api.github.com\/users\/davanstrien\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/davanstrien\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2022-01-18T20:44:01Z","updated_at":"2022-01-21T18:07:28Z","closed_at":"2022-01-21T18:07:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nWhen an a column is cast to an `Image` feature, the cast type appears to be lost during certain operations. I first noticed this when using the `push_to_hub` method on a dataset that contained urls pointing to images which had been cast to an `image`. 
This also happens when using select on a dataset which has had a column cast to an `Image`.\r\n\r\nI suspect this might be related to https:\/\/github.com\/huggingface\/datasets\/pull\/3556 but I don't believe that pull request fixes this issue. \r\n\r\n## Steps to reproduce the bug\r\n\r\nAn example of casting a url to an image followed by using the `select` method:\r\n\r\n```python\r\nfrom datasets import Dataset\r\nfrom datasets import features\r\nurl = \"https:\/\/cf.ltkcdn.net\/cats\/images\/std-lg\/246866-1200x816-grey-white-kitten.webp\"\r\ndata_dict = {\"url\": [url]*2}\r\ndataset = Dataset.from_dict(data_dict)\r\ndataset = dataset.cast_column('url',features.Image())\r\nsample = dataset.select([1])\r\n```\r\n\r\n[example notebook](https:\/\/gist.github.com\/davanstrien\/06e53f4383c28ae77ce1b30d0eaf0d70#file-potential_casting_bug-ipynb)\r\n\r\n## Expected results\r\nThe cast value is maintained when further methods are applied to the dataset. \r\n\r\n## Actual results\r\n```python\r\n---------------------------------------------------------------------------\r\n\r\nValueError Traceback (most recent call last)\r\n\r\n in ()\r\n----> 1 sample = dataset.select([1])\r\n\r\n4 frames\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 487 }\r\n 488 # apply actual function\r\n--> 489 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 490 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 491 # re-apply format to the output\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 409 # Call actual function\r\n 410 \r\n--> 411 out = func(self, *args, **kwargs)\r\n 412 \r\n 413 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in select(self, indices, keep_in_memory, indices_cache_file_name, writer_batch_size, new_fingerprint)\r\n 2772 )\r\n 2773 else:\r\n-> 2774 return self._new_dataset_with_indices(indices_buffer=buf_writer.getvalue(), fingerprint=new_fingerprint)\r\n 2775 \r\n 2776 @transmit_format\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in _new_dataset_with_indices(self, indices_cache_file_name, indices_buffer, fingerprint)\r\n 2688 split=self.split,\r\n 2689 indices_table=indices_table,\r\n-> 2690 fingerprint=fingerprint,\r\n 2691 )\r\n 2692 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in __init__(self, arrow_table, info, split, indices_table, fingerprint)\r\n 664 if self.info.features.type != inferred_features.type:\r\n 665 raise ValueError(\r\n--> 666 f\"External features info don't match the dataset:\\nGot\\n{self.info.features}\\nwith type\\n{self.info.features.type}\\n\\nbut expected something like\\n{inferred_features}\\nwith type\\n{inferred_features.type}\"\r\n 667 )\r\n 668 \r\n\r\nValueError: External features info don't match the dataset:\r\nGot\r\n{'url': Image(id=None)}\r\nwith type\r\nstruct>>\r\n\r\nbut expected something like\r\n{'url': Value(dtype='string', id=None)}\r\nwith type\r\nstruct\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.17.1.dev0\r\n- Platform: Linux-5.4.144+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 
3.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3596\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3596\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3595","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3595\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3595\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3595\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3595","id":1107260527,"node_id":"PR_kwDODunzps4xOIxH","number":3595,"title":"Add ImageNet toy datasets from fastai","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-18T19:03:35Z","updated_at":"2022-01-19T11:33:36Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3595","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3595","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3595.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3595.patch","merged_at":null},"body":"Adds the ImageNet toy datasets from FastAI: Imagenette, Imagewoof and Imagewang.\r\n\r\nTODOs:\r\n* [ ] add dummy data\r\n* [ ] add dataset card\r\n* [ ] generate `dataset_info.json`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3595\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3595\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3594","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3594\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3594\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3594\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3594","id":1107174619,"node_id":"PR_kwDODunzps4xN3Kk","number":3594,"title":"fix multiple language downloading in mC4","user":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-18T17:25:19Z","updated_at":"2022-01-19T11:22:57Z","closed_at":"2022-01-18T19:10:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3594","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3594","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3594.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3594.patch","merged_at":"2022-01-18T19:10:22Z"},"body":"If we try to access multiple languages of the [mC4 dataset](https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/mc4), it will throw an error. For example, if we do \r\n```python\r\nmc4_subset_two_langs = load_dataset(\"mc4\", languages=[\"st\", \"su\"])\r\n```\r\nwe got\r\n```\r\nFileNotFoundError: Couldn't find file at https:\/\/huggingface.co\/datasets\/allenai\/c4\/resolve\/1ddc917116b730e1859edef32896ec5c16be51d0\/multilingual\/c4-st+su.tfrecord-00000-of-00002.json.gz\r\n```\r\nNow it should work. 
Check it (from the root dir of a project): \r\n```python\r\nmc4_subset_two_langs = load_dataset(\".\/datasets\/mc4\/\", languages=[\"st\", \"su\"])\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3594\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3594\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3593","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3593\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3593\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3593\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3593","id":1107070852,"node_id":"PR_kwDODunzps4xNhTu","number":3593,"title":"Update README.md","user":{"login":"borgr","id":6416600,"node_id":"MDQ6VXNlcjY0MTY2MDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6416600?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borgr","html_url":"https:\/\/github.com\/borgr","followers_url":"https:\/\/api.github.com\/users\/borgr\/followers","following_url":"https:\/\/api.github.com\/users\/borgr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borgr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borgr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borgr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borgr\/orgs","repos_url":"https:\/\/api.github.com\/users\/borgr\/repos","events_url":"https:\/\/api.github.com\/users\/borgr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borgr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-18T15:52:16Z","updated_at":"2022-01-20T17:14:53Z","closed_at":"2022-01-20T17:14:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3593","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3593","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3593.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3593.patch","merged_at":"2022-01-20T17:14:52Z"},"body":"Towards license of Tweet Eval parts","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3593\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3593\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3592","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3592\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3592\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3592\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3592","id":1107026723,"node_id":"PR_kwDODunzps4xNYIW","number":3592,"title":"Add QuickDraw dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-18T15:13:39Z","updated_at":"2022-01-18T15:13:39Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3592","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3592","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3592.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3592.patch","merged_at":null},"body":"Add the QuickDraw dataset.\r\n\r\nTODOs:\r\n* [ ] add dummy data\r\n* [ ] add dataset card\r\n* [ ] generate `dataset_info.json`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3592\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3592\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3591","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3591\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3591\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3591\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3591","id":1106928613,"node_id":"PR_kwDODunzps4xNDoB","number":3591,"title":"Add support for time, date, duration, and decimal 
dtypes","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-18T13:46:05Z","updated_at":"2022-01-31T18:29:34Z","closed_at":"2022-01-20T17:37:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3591","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3591","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3591.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3591.patch","merged_at":"2022-01-20T17:37:33Z"},"body":"Add support for the pyarrow time (maps to `datetime.time` in python), date (maps to `datetime.time` in python), duration (maps to `datetime.timedelta` in python), and decimal (maps to `decimal.decimal` in python) dtypes. This should be helpful when writing scripts for time-series datasets. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3591\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3591\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3590","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3590\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3590\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3590\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3590","id":1106784860,"node_id":"PR_kwDODunzps4xMlGg","number":3590,"title":"Update ANLI README.md","user":{"login":"borgr","id":6416600,"node_id":"MDQ6VXNlcjY0MTY2MDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6416600?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borgr","html_url":"https:\/\/github.com\/borgr","followers_url":"https:\/\/api.github.com\/users\/borgr\/followers","following_url":"https:\/\/api.github.com\/users\/borgr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borgr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borgr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borgr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borgr\/orgs","repos_url":"https:\/\/api.github.com\/users\/borgr\/repos","events_url":"https:\/\/api.github.com\/users\/borgr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borgr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-18T11:22:53Z","updated_at":"2022-01-20T16:58:41Z","closed_at":"2022-01-20T16:58:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3590","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3590","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3590.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3590.patch","merged_at":"2022-01-20T16:58:41Z"},"body":"Update license and little things concerning ANLI","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3590\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3590\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3589","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3589\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3589\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3589\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3589","id":1106766114,"node_id":"PR_kwDODunzps4xMhGp","number":3589,"title":"Pin torchmetrics to fix the COMET 
test","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-18T11:03:49Z","updated_at":"2022-01-18T11:04:56Z","closed_at":"2022-01-18T11:04:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3589","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3589","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3589.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3589.patch","merged_at":"2022-01-18T11:04:55Z"},"body":"Torchmetrics 0.7.0 got released and has issues with `transformers` (see https:\/\/github.com\/PyTorchLightning\/metrics\/issues\/770)\r\n\r\nI'm pinning it to 0.6.0 in the CI, since 0.7.0 makes the COMET metric test fail. 
COMET requires torchmetrics==0.6.0 anyway.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3589\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3589\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3588","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3588\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3588\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3588\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3588","id":1106749000,"node_id":"PR_kwDODunzps4xMdiC","number":3588,"title":"Update HellaSwag README.md","user":{"login":"borgr","id":6416600,"node_id":"MDQ6VXNlcjY0MTY2MDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6416600?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borgr","html_url":"https:\/\/github.com\/borgr","followers_url":"https:\/\/api.github.com\/users\/borgr\/followers","following_url":"https:\/\/api.github.com\/users\/borgr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borgr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borgr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borgr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borgr\/orgs","repos_url":"https:\/\/api.github.com\/users\/borgr\/repos","events_url":"https:\/\/api.github.com\/users\/borgr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borgr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-18T10:46:15Z","updated_at":"2022-01-20T16:57:43Z","closed_at":"2022-01-20T16:57:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3588","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3588","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3588.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3588.patch","merged_at":"2022-01-20T16:57:43Z"},"body":"Adding information from the git repo and paper that were missing","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3588\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3588\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3587","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3587\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3587\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3587\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3587","id":1106719182,"node_id":"I_kwDODunzps5B9zHO","number":3587,"title":"No module named 
'fsspec.archive'","user":{"login":"shuuchen","id":13246825,"node_id":"MDQ6VXNlcjEzMjQ2ODI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13246825?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shuuchen","html_url":"https:\/\/github.com\/shuuchen","followers_url":"https:\/\/api.github.com\/users\/shuuchen\/followers","following_url":"https:\/\/api.github.com\/users\/shuuchen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shuuchen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shuuchen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shuuchen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shuuchen\/orgs","repos_url":"https:\/\/api.github.com\/users\/shuuchen\/repos","events_url":"https:\/\/api.github.com\/users\/shuuchen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shuuchen\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-18T10:17:01Z","updated_at":"2022-01-18T10:33:10Z","closed_at":"2022-01-18T10:33:10Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nCannot import datasets after installation.\r\n\r\n## Steps to reproduce the bug\r\n```shell\r\n$ python\r\nPython 3.9.7 (default, Sep 16 2021, 13:09:58)\r\n[GCC 7.5.0] :: Anaconda, Inc. on linux\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n>>> import datasets\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/shuchen\/miniconda3\/envs\/hf\/lib\/python3.9\/site-packages\/datasets\/__init__.py\", line 34, in \r\n from .arrow_dataset import Dataset, concatenate_datasets\r\n File \"\/home\/shuchen\/miniconda3\/envs\/hf\/lib\/python3.9\/site-packages\/datasets\/arrow_dataset.py\", line 61, in \r\n from .arrow_writer import ArrowWriter, OptimizedTypedSequence\r\n File \"\/home\/shuchen\/miniconda3\/envs\/hf\/lib\/python3.9\/site-packages\/datasets\/arrow_writer.py\", line 28, in \r\n from .features import (\r\n File \"\/home\/shuchen\/miniconda3\/envs\/hf\/lib\/python3.9\/site-packages\/datasets\/features\/__init__.py\", line 2, in \r\n from .audio import Audio\r\n File \"\/home\/shuchen\/miniconda3\/envs\/hf\/lib\/python3.9\/site-packages\/datasets\/features\/audio.py\", line 7, in \r\n from ..utils.streaming_download_manager import xopen\r\n File \"\/home\/shuchen\/miniconda3\/envs\/hf\/lib\/python3.9\/site-packages\/datasets\/utils\/streaming_download_manager.py\", line 18, in \r\n from ..filesystems import COMPRESSION_FILESYSTEMS\r\n File \"\/home\/shuchen\/miniconda3\/envs\/hf\/lib\/python3.9\/site-packages\/datasets\/filesystems\/__init__.py\", line 6, in \r\n from . 
import compression\r\n File \"\/home\/shuchen\/miniconda3\/envs\/hf\/lib\/python3.9\/site-packages\/datasets\/filesystems\/compression.py\", line 5, in \r\n from fsspec.archive import AbstractArchiveFileSystem\r\nModuleNotFoundError: No module named 'fsspec.archive'\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3587\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3587\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3586","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3586\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3586\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3586\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3586","id":1106455672,"node_id":"I_kwDODunzps5B8yx4","number":3586,"title":"Revisit `enable\/disable_` toggle function prefix","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-18T04:09:55Z","updated_at":"2022-01-18T04:09:55Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As discussed in https:\/\/github.com\/huggingface\/transformers\/pull\/15167, we should revisit the `enable\/disable_` toggle function prefix, potentially in favor of `set_enabled_`. Concretely, this translates to\r\n\r\n- De-deprecating `disable_progress_bar()`\r\n- Adding `enable_progress_bar()`\r\n- On the caching side, adding `enable_caching` and `disable_caching`\r\n\r\nAdditional decisions have to be made with regards to the existing `set_enabled_X` functions; that is, whether to keep them as is or deprecate them in favor of the aforementioned functions. 
\r\n\r\ncc @mariosasko @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3586\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3586\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3585","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3585\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3585\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3585\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3585","id":1105821470,"node_id":"I_kwDODunzps5B6X8e","number":3585,"title":"Datasets streaming + map doesn't work for `Audio`","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":1935892865,"node_id":"MDU6TGFiZWwxOTM1ODkyODY1","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/duplicate","name":"duplicate","color":"cfd3d7","default":true,"description":"This issue or pull request already 
exists"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-17T12:55:42Z","updated_at":"2022-01-20T13:28:00Z","closed_at":"2022-01-20T13:28:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen using audio datasets in streaming mode, applying a `map(...)` before iterating leads to an error as the key `array` does not exist anymore.\r\n\r\n## Steps to 
reproduce the bug\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\"common_voice\", \"en\", streaming=True, split=\"train\")\r\n\r\n\r\ndef map_fn(batch):\r\n print(\"audio keys\", batch[\"audio\"].keys())\r\n batch[\"audio\"] = batch[\"audio\"][\"array\"][:100]\r\n return batch\r\n\r\n\r\nds = ds.map(map_fn) \r\n\r\nsample = next(iter(ds))\r\n```\r\n\r\nI think the audio is somehow decoded before `.map(...)` is actually called.\r\n\r\n## Expected results\r\n\r\nIMO, the above code snippet should work.\r\n\r\n## Actual results\r\n\r\n```bash\r\naudio keys dict_keys(['path', 'bytes'])\r\nTraceback (most recent call last):\r\n File \".\/run_audio.py\", line 15, in \r\n sample = next(iter(ds))\r\n File \"\/home\/patrick\/python_bin\/datasets\/iterable_dataset.py\", line 341, in __iter__\r\n for key, example in self._iter():\r\n File \"\/home\/patrick\/python_bin\/datasets\/iterable_dataset.py\", line 338, in _iter\r\n yield from ex_iterable\r\n File \"\/home\/patrick\/python_bin\/datasets\/iterable_dataset.py\", line 192, in __iter__\r\n yield key, self.function(example)\r\n File \".\/run_audio.py\", line 9, in map_fn\r\n batch[\"input\"] = batch[\"audio\"][\"array\"][:100]\r\nKeyError: 'array'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.17.1.dev0\r\n- Platform: Linux-5.3.0-64-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.12\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3585\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3585\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3584","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3584\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3584\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3584\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3584","id":1105231768,"node_id":"I_kwDODunzps5B4H-Y","number":3584,"title":"https:\/\/huggingface.co\/datasets\/huggingface\/transformers-metadata","user":{"login":"ecankirkic","id":37082592,"node_id":"MDQ6VXNlcjM3MDgyNTky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37082592?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ecankirkic","html_url":"https:\/\/github.com\/ecankirkic","followers_url":"https:\/\/api.github.com\/users\/ecankirkic\/followers","following_url":"https:\/\/api.github.com\/users\/ecankirkic\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ecankirkic\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ecankirkic\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ecankirkic\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ecankirkic\/orgs","repos_url":"https:\/\/api.github.com\/users\/ecankirkic\/repos","events_url":"https:\/\/api.github.com\/users\/ecankirkic\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ecankirkic\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-v
iewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-17T00:18:14Z","updated_at":"2022-01-17T09:21:54Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*name of the dataset*'\r\n\r\n**Link:** *link to the dataset viewer page*\r\n\r\n*short description of the issue*\r\n\r\nAm I the one who added this dataset ? Yes-No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3584\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3584\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3583","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3583\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3583\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3583\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3583","id":1105195144,"node_id":"I_kwDODunzps5B3_CI","number":3583,"title":"Add The Medical Segmentation Decathlon Dataset","user":{"login":"omarespejel","id":4755430,"node_id":"MDQ6VXNlcjQ3NTU0MzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4755430?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/omarespejel","html_url":"https:\/\/github.com\/omarespejel","followers_url":"https:\/\/api.github.com\/users\/omarespejel\/followers","following_url":"https:\/\/api.github.com\/users\/omarespejel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/omarespejel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/omarespejel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/omarespejel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/omarespejel\/orgs","repos_url":"https:\/\/api.github.com\/users\/omarespejel\/repos","events_url":"https:\/\/api.github.com\/users\/omarespejel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/omarespejel\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-16T21:42:25Z","updated_at":"2022-02-09T19:39:16Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *The Medical Segmentation Decathlon Dataset*\r\n- **Description:** The underlying data set was designed to explore the axis of difficulties typically encountered when dealing with medical images, such as 
small data sets, unbalanced labels, multi-site data, and small objects. \r\n- **Paper:** [link to the dataset paper if available](https:\/\/arxiv.org\/abs\/2106.05735)\r\n- **Data:** http:\/\/medicaldecathlon.com\/\r\n- **Motivation:** Hugging Face seeks to democratize ML for society. One of the growing niches within ML is the ML + Medicine community. Key data sets will help increase the supply of HF resources for starting an initial community.\r\n\r\n(cc @osanseviero @abidlabs )\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3583\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3583\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3582","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3582\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3582\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3582\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3582","id":1104877303,"node_id":"I_kwDODunzps5B2xb3","number":3582,"title":"conll 2003 dataset source url is no longer valid","user":{"login":"rcanand","id":303900,"node_id":"MDQ6VXNlcjMwMzkwMA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/303900?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rcanand","html_url":"https:\/\/github.com\/rcanand","followers_url":"https:\/\/api.github.com\/users\/rcanand\/followers","following_url":"https:\/\/api.github.com\/users\/rcanand\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rcanand\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rcanand\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rcanand\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rcanand\/orgs","repos_url":"https:\/\/api.github.com\/users\/rcanand\/repos","events_url":"https:\/\/api.github.com\/users\/rcanand\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rcanand\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2022-01-15T23:04:17Z","updated_at":"2022-01-21T16:57:32Z","closed_at":"2022-01-21T16:57:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nLoading `conll2003` dataset fails because it was removed (just yesterday 1\/14\/2022) from the location it is looking for.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n\r\nload_dataset(\"conll2003\")\r\n```\r\n\r\n## Expected results\r\nThe dataset should load.\r\n\r\n## Actual results\r\nIt is looking for the dataset at `https:\/\/github.com\/davidsbatista\/NER-datasets\/raw\/master\/CONLL2003\/train.txt` but it was removed from there yesterday (see [commit](https:\/\/github.com\/davidsbatista\/NER-datasets\/commit\/9d8f45cc7331569af8eb3422bbe1c97cbebd5690) that removed the file and related [issue](https:\/\/github.com\/davidsbatista\/NER-datasets\/issues\/8)). 
\r\n\r\n- We should replace this with an alternate valid location.\r\n- this is being referenced in the huggingface course chapter 7 [colab notebook](https:\/\/colab.research.google.com\/github\/huggingface\/notebooks\/blob\/master\/course\/chapter7\/section2_pt.ipynb), which is also broken.\r\n\r\n```python\r\nFileNotFoundError Traceback (most recent call last)\r\n in ()\r\n 1 from datasets import load_dataset\r\n 2 \r\n----> 3 raw_datasets = load_dataset(\"conll2003\")\r\n\r\n11 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag, max_retries, use_auth_token, ignore_url_params)\r\n 610 )\r\n 611 elif response is not None and response.status_code == 404:\r\n--> 612 raise FileNotFoundError(f\"Couldn't find file at {url}\")\r\n 613 _raise_if_offline_mode_is_enabled(f\"Tried to reach {url}\")\r\n 614 if head_error is not None:\r\n\r\nFileNotFoundError: Couldn't find file at https:\/\/github.com\/davidsbatista\/NER-datasets\/raw\/master\/CONLL2003\/train.txt\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform:\r\n- Python version:\r\n- PyArrow version:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3582\/reactions","total_count":5,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":5,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3582\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3581","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3581\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3581\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3581\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3581","id":1104857822,"node_id":"I_kwDODunzps5B2sre","number":3581,"title":"Unable to create a dataset from a parquet file in S3","user":{"login":"regCode","id":18012903,"node_id":"MDQ6VXNlcjE4MDEyOTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18012903?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/regCode","html_url":"https:\/\/github.com\/regCode","followers_url":"https:\/\/api.github.com\/users\/regCode\/followers","following_url":"https:\/\/api.github.com\/users\/regCode\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/regCode\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/regCode\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/regCode\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/regCode\/orgs","repos_url":"https:\/\/api.github.com\/users\/regCode\/repos","events_url":"https:\/\/api.github.com\/users\/regCode\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/regCode\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-15T21:34:16Z","updated_at":"2022-01-21T16:58:56Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nTrying to create a dataset from a parquet file in S3.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport s3fs\r\nfrom datasets import Dataset\r\n\r\ns3 = s3fs.S3FileSystem(anon=False)\r\n\r\nwith s3.open(PATH_LTR_TOY_CLEAN_DATASET, 'rb') as s3file:\r\n dataset = Dataset.from_parquet(s3file)\r\n```\r\n\r\n## Expected results\r\nA new Dataset object\r\n\r\n## Actual results\r\n\r\n```AttributeError: 'S3File' object has no attribute 'decode'```\r\n```\r\nAttributeError Traceback (most recent call last)\r\n in \r\n 5 \r\n 6 with s3.open(PATH_LTR_TOY_CLEAN_DATASET, 'rb') as s3file:\r\n----> 7 dataset = Dataset.from_parquet(s3file)\r\n\r\n\/databricks\/python\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py in from_parquet(path_or_paths, split, features, cache_dir, keep_in_memory, columns, **kwargs)\r\n 907 from .io.parquet import ParquetDatasetReader\r\n 908 \r\n--> 909 return ParquetDatasetReader(\r\n 910 path_or_paths,\r\n 911 split=split,\r\n\r\n\/databricks\/python\/lib\/python3.8\/site-packages\/datasets\/io\/parquet.py in __init__(self, path_or_paths, split, features, cache_dir, keep_in_memory, **kwargs)\r\n 28 path_or_paths = path_or_paths if isinstance(path_or_paths, dict) else {self.split: path_or_paths}\r\n 29 hash = _PACKAGED_DATASETS_MODULES[\"parquet\"][1]\r\n---> 30 self.builder = Parquet(\r\n 31 cache_dir=cache_dir,\r\n 32 data_files=path_or_paths,\r\n\r\n\/databricks\/python\/lib\/python3.8\/site-packages\/datasets\/builder.py in __init__(self, cache_dir, name, hash, base_path, info, features, use_auth_token, namespace, data_files, data_dir, **config_kwargs)\r\n 246 \r\n 247 if data_files is not None and not isinstance(data_files, DataFilesDict):\r\n--> 248 data_files = DataFilesDict.from_local_or_remote(\r\n 249 sanitize_patterns(data_files), base_path=base_path, use_auth_token=use_auth_token\r\n 250 )\r\n\r\n\/databricks\/python\/lib\/python3.8\/site-packages\/datasets\/data_files.py in from_local_or_remote(cls, patterns, base_path, allowed_extensions, use_auth_token)\r\n 576 for key, patterns_for_key in patterns.items():\r\n 577 out[key] = (\r\n--> 578 DataFilesList.from_local_or_remote(\r\n 579 patterns_for_key,\r\n 580 base_path=base_path,\r\n\r\n\/databricks\/python\/lib\/python3.8\/site-packages\/datasets\/data_files.py in from_local_or_remote(cls, patterns, base_path, allowed_extensions, use_auth_token)\r\n 544 ) -> \"DataFilesList\":\r\n 545 base_path = base_path if base_path is not None else str(Path().resolve())\r\n--> 546 data_files = resolve_patterns_locally_or_by_urls(base_path, patterns, allowed_extensions)\r\n 547 origin_metadata = _get_origin_metadata_locally_or_by_urls(data_files, use_auth_token=use_auth_token)\r\n 548 return cls(data_files, origin_metadata)\r\n\r\n\/databricks\/python\/lib\/python3.8\/site-packages\/datasets\/data_files.py in resolve_patterns_locally_or_by_urls(base_path, patterns, allowed_extensions)\r\n 191 data_files = []\r\n 192 for pattern in patterns:\r\n--> 193 if is_remote_url(pattern):\r\n 194 data_files.append(Url(pattern))\r\n 195 else:\r\n\r\n\/databricks\/python\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py in is_remote_url(url_or_filename)\r\n 115 \r\n 116 def is_remote_url(url_or_filename: 
str) -> bool:\r\n--> 117 parsed = urlparse(url_or_filename)\r\n 118 return parsed.scheme in (\"http\", \"https\", \"s3\", \"gs\", \"hdfs\", \"ftp\")\r\n 119 \r\n\r\n\/usr\/lib\/python3.8\/urllib\/parse.py in urlparse(url, scheme, allow_fragments)\r\n 370 Note that we don't break the components up in smaller bits\r\n 371 (e.g. netloc is a single string) and we don't expand % escapes.\"\"\"\r\n--> 372 url, scheme, _coerce_result = _coerce_args(url, scheme)\r\n 373 splitresult = urlsplit(url, scheme, allow_fragments)\r\n 374 scheme, netloc, url, query, fragment = splitresult\r\n\r\n\/usr\/lib\/python3.8\/urllib\/parse.py in _coerce_args(*args)\r\n 122 if str_input:\r\n 123 return args + (_noop,)\r\n--> 124 return _decode_args(args) + (_encode_result,)\r\n 125 \r\n 126 # Result objects are more helpful than simple tuples\r\n\r\n\/usr\/lib\/python3.8\/urllib\/parse.py in _decode_args(args, encoding, errors)\r\n 106 def _decode_args(args, encoding=_implicit_encoding,\r\n 107 errors=_implicit_errors):\r\n--> 108 return tuple(x.decode(encoding, errors) if x else '' for x in args)\r\n 109 \r\n 110 def _coerce_args(*args):\r\n\r\n\/usr\/lib\/python3.8\/urllib\/parse.py in (.0)\r\n 106 def _decode_args(args, encoding=_implicit_encoding,\r\n 107 errors=_implicit_errors):\r\n--> 108 return tuple(x.decode(encoding, errors) if x else '' for x in args)\r\n 109 \r\n 110 def _coerce_args(*args):\r\n\r\nAttributeError: 'S3File' object has no attribute 'decode'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.17.0\r\n- Platform: Ubuntu 20.04.3 LTS\r\n- Python version: 3.8.10\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3581\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3581\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3580","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3580\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3580\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3580\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3580","id":1104663242,"node_id":"I_kwDODunzps5B19LK","number":3580,"title":"Bug in wiki bio 
load","user":{"login":"tuhinjubcse","id":3104771,"node_id":"MDQ6VXNlcjMxMDQ3NzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3104771?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tuhinjubcse","html_url":"https:\/\/github.com\/tuhinjubcse","followers_url":"https:\/\/api.github.com\/users\/tuhinjubcse\/followers","following_url":"https:\/\/api.github.com\/users\/tuhinjubcse\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tuhinjubcse\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tuhinjubcse\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tuhinjubcse\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tuhinjubcse\/orgs","repos_url":"https:\/\/api.github.com\/users\/tuhinjubcse\/repos","events_url":"https:\/\/api.github.com\/users\/tuhinjubcse\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tuhinjubcse\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-01-15T10:04:33Z","updated_at":"2022-01-31T08:38:09Z","closed_at":"2022-01-31T08:38:09Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"\r\nwiki_bio is failing to load because of a failing drive link . Can someone fix this ?\r\n\r\n\r\n![7E90023B-A3B1-4930-BA25-45CCCB4E1710](https:\/\/user-images.githubusercontent.com\/3104771\/149617870-5a32a2da-2c78-483b-bff6-d7534215a423.png)\r\n\r\n\r\n![653C1C76-C725-4A04-A0D8-084373BA612F](https:\/\/user-images.githubusercontent.com\/3104771\/149617875-ef0e30b0-b76e-48cf-b3eb-93ba8e6e5465.png)\r\na","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3580\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3580\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3579","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3579\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3579\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3579\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3579","id":1103451118,"node_id":"PR_kwDODunzps4xBmY4","number":3579,"title":"Add Text2log 
Dataset","user":{"login":"apergo-ai","id":68908804,"node_id":"MDQ6VXNlcjY4OTA4ODA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68908804?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/apergo-ai","html_url":"https:\/\/github.com\/apergo-ai","followers_url":"https:\/\/api.github.com\/users\/apergo-ai\/followers","following_url":"https:\/\/api.github.com\/users\/apergo-ai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/apergo-ai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/apergo-ai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/apergo-ai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/apergo-ai\/orgs","repos_url":"https:\/\/api.github.com\/users\/apergo-ai\/repos","events_url":"https:\/\/api.github.com\/users\/apergo-ai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/apergo-ai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-14T10:45:01Z","updated_at":"2022-01-20T17:09:44Z","closed_at":"2022-01-20T17:09:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3579","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3579","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3579.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3579.patch","merged_at":"2022-01-20T17:09:44Z"},"body":"Adding the text2log dataset used for training FOL sentence translating models","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3579\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3579\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3578","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3578\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3578\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3578\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3578","id":1103403287,"node_id":"I_kwDODunzps5BxJkX","number":3578,"title":"label information get lost after parquet 
serialization","user":{"login":"Tudyx","id":56633664,"node_id":"MDQ6VXNlcjU2NjMzNjY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56633664?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Tudyx","html_url":"https:\/\/github.com\/Tudyx","followers_url":"https:\/\/api.github.com\/users\/Tudyx\/followers","following_url":"https:\/\/api.github.com\/users\/Tudyx\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Tudyx\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Tudyx\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Tudyx\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Tudyx\/orgs","repos_url":"https:\/\/api.github.com\/users\/Tudyx\/repos","events_url":"https:\/\/api.github.com\/users\/Tudyx\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Tudyx\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-14T10:10:38Z","updated_at":"2022-01-25T07:21:41Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIn *dataset_info.json* file, information about the label get lost after the dataset serialization.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n# normal save\r\ndataset = load_dataset('glue', 'sst2', split='train')\r\ndataset.save_to_disk(\"normal_save\")\r\n\r\n\r\n# save after parquet serialization\r\ndataset.to_parquet(\"glue-sst2-train.parquet\")\r\ndataset = load_dataset(\"parquet\", data_files='glue-sst2-train.parquet')\r\ndataset.save_to_disk(\"save_after_parquet\")\r\n```\r\n\r\n## Expected results\r\nI expected to keep label information in *dataset_info.json* file even after parquet serialization\r\n\r\n## Actual results\r\nIn the normal serialization i got\r\n```json\r\n\"label\": {\r\n \"num_classes\": 2,\r\n \"names\": [\r\n \"negative\",\r\n \"positive\"\r\n ],\r\n \"names_file\": null,\r\n \"id\": null,\r\n \"_type\": \"ClassLabel\"\r\n },\r\n```\r\nAnd after parquet serialization i got\r\n```json\r\n\"label\": {\r\n \"dtype\": \"int64\",\r\n \"id\": null,\r\n \"_type\": \"Value\"\r\n },\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.18.0\r\n- Platform: ubuntu 20.04\r\n- Python version: 3.8.10\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3578\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3578\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3577","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3577\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3577\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3577\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3577","id":1102598241,"node_id":"I_kwDODunzps5BuFBh","number":3577,"title":"Add The Mexican Emotional Speech Database (MESD)","user":{"login":"omarespejel","id":4755430,"node_id":"MDQ6VXNlcjQ3NTU0MzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4755430?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/omarespejel","html_url":"https:\/\/github.com\/omarespejel","followers_url":"https:\/\/api.github.com\/users\/omarespejel\/followers","following_url":"https:\/\/api.github.com\/users\/omarespejel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/omarespejel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/omarespejel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/omarespejel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/omarespejel\/orgs","repos_url":"https:\/\/api.github.com\/users\/omarespejel\/repos","events_url":"https:\/\/api.github.com\/users\/omarespejel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/omarespejel\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-13T23:49:36Z","updated_at":"2022-01-27T14:14:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *The Mexican Emotional Speech Database (MESD)*\r\n- **Description:** *Contains 864 voice recordings with six different prosodies: anger, disgust, fear, happiness, neutral, and sadness. Furthermore, three voice categories are included: female adult, male adult, and child. 
*\r\n- **Paper:** *[Paper](https:\/\/ieeexplore.ieee.org\/abstract\/document\/9629934\/authors#authors)*\r\n- **Data:** *[link to the Github repository or current dataset location](https:\/\/data.mendeley.com\/datasets\/cy34mh68j9\/3)*\r\n- **Motivation:** *Would add Spanish speech data to the HF datasets :) *\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3577\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3577\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3576","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3576\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3576\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3576\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3576","id":1102059651,"node_id":"PR_kwDODunzps4w8sUm","number":3576,"title":"Add PASS dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-13T17:16:07Z","updated_at":"2022-01-20T16:50:48Z","closed_at":"2022-01-20T16:50:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3576","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3576","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3576.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3576.patch","merged_at":"2022-01-20T16:50:47Z"},"body":"This PR adds the PASS dataset.\r\n\r\nCloses #3043 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3576\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3576\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3575","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3575\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3575\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3575\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3575","id":1101947955,"node_id":"PR_kwDODunzps4w8Usm","number":3575,"title":"Add Arrow type casting to struct for Image and Audio + Support nested casting","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2022-01-13T15:36:59Z","updated_at":"2022-01-21T13:22:28Z","closed_at":"2022-01-21T13:22:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3575","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3575","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3575.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3575.patch","merged_at":"2022-01-21T13:22:27Z"},"body":"## Intro\r\n\r\n1. Currently, it's not possible to have nested features containing Audio or Image. \r\n\r\n2. Moreover one can keep an Arrow array as a StringArray to store paths to images, but such arrays can't be directly concatenated to another image array if it's stored an another Arrow type (typically, a StructType).\r\n\r\n3. Allowing several Arrow types for a single HF feature type also leads to bugs like this one #3497 \r\n\r\n4. Issues like #3247 are quite frequent and happen when Arrow fails to reorder StructArrays. \r\n\r\n5. Casting Audio feature type is blocking preparation for the ASR task template: https:\/\/github.com\/huggingface\/datasets\/pull\/3364\r\n\r\nAll those issues are linked together by the fact that:\r\n- we are limited by the Arrow type casting which is lacking features for nested types.\r\n- and especially for Audio and Image: they are not robust enough for concatenation and feature inference.\r\n\r\n## Proposed solution\r\n\r\nTo fix 1 and 4 I implemented nested array type casting (which is missing in PyArrow).\r\n\r\nTo fix 2, 3 and 5 while having a simple implementation for nested array type casting, I changed the storage type of Audio and Image to always be a StructType. 
Also casting from StringType is directly implemented via a new function `cast_storage` that is defined individually for Audio and Image. I also added nested decoding.\r\n\r\n## Implementation details\r\n\r\n### I. Better Arrow data type casting for nested data structures\r\n\r\nI implemented new functions `array_cast` and `table_cast` that do the exact same as `pyarrow.Array.cast` or `pyarrow.Table.cast` but support nested struct casting and array re-ordering.\r\n\r\nThese functions can be used on PyArrow objects, and are already integrated in our own `datasets.table.Table.cast` functions. So one can do `my_dataset.data.cast(pyarrow_schema_with_custom_hf_types)` directly.\r\n\r\n### II. New image and audio extension types with custom casting\r\n\r\nI used PyArrow extension types to be able to define what casting is allowed or not. For example both StringType->ImageExtensionType and StructType->ImageExtensionType are allowed, via the `cast_storage` method.\r\n\r\nI factorized all the PyArrow + Pandas extension stuff in the `base_extension.py` file. This aims at separating the front-facing API code of `datasets` from the Arrow back-end which requires advanced knowledge.\r\n\r\n### III. Nested feature decoding\r\n\r\nI added a new function `decode_nested_example` to decode image and audio data in nested data structures. For optimization's sake, this function is only called if a column has at least one feature that requires decoding.\r\n\r\n## Alternative considered\r\n\r\nThe casting to struct type could have been done directly with python objects using some Audio and Image methods, but bringing arrow data to python objects is expensive. The Audio and Image types could also have been able to convert the arrow data directly, but this is not convenient to use when casting a full Arrow Table with nested fields. 
Therefore I decided to keep the Arrow data casting logic in Arrow extension types.\r\n\r\n## Future work\r\n\r\nThis work can be used to allow the ArrayND feature types to be nested too (see issue #887)\r\n\r\n## TODO\r\n\r\n- [x] fix current tests\r\n- [x] add new tests\r\n- [x] docstrings\/comments","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3575\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3575\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3574","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3574\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3574\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3574\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3574","id":1101781401,"node_id":"PR_kwDODunzps4w7vu6","number":3574,"title":"Fix qa4mre tags","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-13T13:56:59Z","updated_at":"2022-01-13T14:03:02Z","closed_at":"2022-01-13T14:03:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3574","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3574","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3574.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3574.patch","merged_at":"2022-01-13T14:03:01Z"},"body":"The YAML tags were invalid. 
I also fixed the dataset mirroring logging that failed because of this issue [here](https:\/\/github.com\/huggingface\/datasets\/actions\/runs\/1690109581)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3574\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3574\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3573","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3573\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3573\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3573\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3573","id":1101157676,"node_id":"PR_kwDODunzps4w5oE_","number":3573,"title":"Add Mauve metric","user":{"login":"jthickstun","id":2321244,"node_id":"MDQ6VXNlcjIzMjEyNDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2321244?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jthickstun","html_url":"https:\/\/github.com\/jthickstun","followers_url":"https:\/\/api.github.com\/users\/jthickstun\/followers","following_url":"https:\/\/api.github.com\/users\/jthickstun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jthickstun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jthickstun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jthickstun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jthickstun\/orgs","repos_url":"https:\/\/api.github.com\/users\/jthickstun\/repos","events_url":"https:\/\/api.github.com\/users\/jthickstun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jthickstun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-13T03:52:48Z","updated_at":"2022-01-20T15:00:08Z","closed_at":"2022-01-20T15:00:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3573","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3573","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3573.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3573.patch","merged_at":"2022-01-20T15:00:07Z"},"body":"Add support for the [Mauve](https:\/\/github.com\/krishnap25\/mauve) metric introduced in this [paper](https:\/\/arxiv.org\/pdf\/2102.01454.pdf) (Neurips, 2021).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3573\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3573\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3572","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3572\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3572\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3572\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3572","id":1100634244,"node_id":"I_kwDODunzps5BmliE","number":3572,"title":"ConnectionError: Couldn't reach https:\/\/storage.googleapis.com\/ai4bharat-public-indic-nlp-corpora\/evaluations\/wikiann-ner.tar.gz (error 403)","user":{"login":"sahoodib","id":79107194,"node_id":"MDQ6VXNlcjc5MTA3MTk0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79107194?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sahoodib","html_url":"https:\/\/github.com\/sahoodib","followers_url":"https:\/\/api.github.com\/users\/sahoodib\/followers","following_url":"https:\/\/api.github.com\/users\/sahoodib\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sahoodib\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sahoodib\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sahoodib\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sahoodib\/orgs","repos_url":"https:\/\/api.github.com\/users\/sahoodib\/repos","events_url":"https:\/\/api.github.com\/users\/sahoodib\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sahoodib\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-12T17:59:36Z","updated_at":"2022-01-17T13:15:28Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:**IndicGLUE**\r\n- **Description:** *natural language understanding benchmark for Indian languages*\r\n- **Paper:** *(https:\/\/indicnlp.ai4bharat.org\/home\/)*\r\n- **Data:** *https:\/\/huggingface.co\/datasets\/indic_glue#data-fields*\r\n- **Motivation:** *I am trying to train my model on Indian languages*\r\n\r\n\r\n\r\nWhile I am trying to load dataset it is giving me with the above error.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3572\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3572\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3571","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3571\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3571\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3571\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3571","id":1100519604,"node_id":"PR_kwDODunzps4w3fVQ","number":3571,"title":"Add missing tasks to MuchoCine dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-12T16:07:32Z","updated_at":"2022-01-20T16:51:08Z","closed_at":"2022-01-20T16:51:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3571","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3571","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3571.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3571.patch","merged_at":"2022-01-20T16:51:07Z"},"body":"Addresses the 2nd bullet point in #2520.\r\n\r\nI'm also removing the licensing information, because I couldn't verify that it is correct. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3571\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3571\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3570","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3570\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3570\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3570\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3570","id":1100480791,"node_id":"PR_kwDODunzps4w3Xez","number":3570,"title":"Add the KMWP dataset (extension of #3564)","user":{"login":"sooftware","id":42150335,"node_id":"MDQ6VXNlcjQyMTUwMzM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42150335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sooftware","html_url":"https:\/\/github.com\/sooftware","followers_url":"https:\/\/api.github.com\/users\/sooftware\/followers","following_url":"https:\/\/api.github.com\/users\/sooftware\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sooftware\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sooftware\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sooftware\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sooftware\/orgs","repos_url":"https:\/\/api.github.com\/users\/sooftware\/repos","events_url":"https:\/\/api.github.com\/users\/sooftware\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sooftware\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-12T15:33:08Z","updated_at":"2022-01-26T02:16:48Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3570","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3570","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3570.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3570.patch","merged_at":null},"body":"New pull request of #3564 (Add the KMWP dataset)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3570\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3570\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3569","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3569\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3569\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3569\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3569","id":1100478994,"node_id":"PR_kwDODunzps4w3XGo","number":3569,"title":"Add the DKTC dataset (Extension of 
#3564)","user":{"login":"sooftware","id":42150335,"node_id":"MDQ6VXNlcjQyMTUwMzM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42150335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sooftware","html_url":"https:\/\/github.com\/sooftware","followers_url":"https:\/\/api.github.com\/users\/sooftware\/followers","following_url":"https:\/\/api.github.com\/users\/sooftware\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sooftware\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sooftware\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sooftware\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sooftware\/orgs","repos_url":"https:\/\/api.github.com\/users\/sooftware\/repos","events_url":"https:\/\/api.github.com\/users\/sooftware\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sooftware\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2022-01-12T15:31:29Z","updated_at":"2022-01-26T02:16:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3569","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3569","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3569.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3569.patch","merged_at":null},"body":"New pull request of #3564. (for DKTC)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3569\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3569\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3568","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3568\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3568\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3568\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3568","id":1100380631,"node_id":"I_kwDODunzps5BlnnX","number":3568,"title":"Downloading Hugging Face Medical Dialog Dataset 
NonMatchingSplitsSizesError","user":{"login":"fabianslife","id":49265757,"node_id":"MDQ6VXNlcjQ5MjY1NzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49265757?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/fabianslife","html_url":"https:\/\/github.com\/fabianslife","followers_url":"https:\/\/api.github.com\/users\/fabianslife\/followers","following_url":"https:\/\/api.github.com\/users\/fabianslife\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/fabianslife\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/fabianslife\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/fabianslife\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/fabianslife\/orgs","repos_url":"https:\/\/api.github.com\/users\/fabianslife\/repos","events_url":"https:\/\/api.github.com\/users\/fabianslife\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/fabianslife\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-12T14:03:44Z","updated_at":"2022-01-17T13:15:41Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I wanted to download the Nedical Dialog Dataset from huggingface, using this github link:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/medical_dialog\r\n\r\nAfter downloading the raw datasets from google drive, i unpacked everything and put it in the same folder as the medical_dialog.py which is:\r\n\r\n```\r\nimport copy\r\nimport os\r\nimport re\r\n\r\nimport datasets\r\n\r\n\r\n_CITATION = \"\"\"\\\r\n@article{chen2020meddiag,\r\n title={MedDialog: a large-scale medical dialogue dataset},\r\n author={Chen, Shu and Ju, Zeqian and Dong, Xiangyu and Fang, Hongchao and Wang, Sicheng and Yang, Yue and Zeng, Jiaqi and Zhang, Ruisi and Zhang, Ruoyu and Zhou, Meng and Zhu, Penghui and Xie, Pengtao},\r\n journal={arXiv preprint arXiv:2004.03329},\r\n year={2020}\r\n}\r\n\"\"\"\r\n\r\n\r\n_DESCRIPTION = \"\"\"\\\r\nThe MedDialog dataset (English) contains conversations (in English) between doctors and patients.\\\r\nIt has 0.26 million dialogues. The data is continuously growing and more dialogues will be added. \\\r\nThe raw dialogues are from healthcaremagic.com and icliniq.com.\\\r\n\r\nAll copyrights of the data belong to healthcaremagic.com and icliniq.com.\r\n\"\"\"\r\n\r\n_HOMEPAGE = \"https:\/\/github.com\/UCSD-AI4H\/Medical-Dialogue-System\"\r\n\r\n_LICENSE = \"\"\r\n\r\n\r\nclass MedicalDialog(datasets.GeneratorBasedBuilder):\r\n VERSION = datasets.Version(\"1.0.0\")\r\n\r\n BUILDER_CONFIGS = [\r\n datasets.BuilderConfig(name=\"en\", description=\"The dataset of medical dialogs in English.\", version=VERSION),\r\n datasets.BuilderConfig(name=\"zh\", description=\"The dataset of medical dialogs in Chinese.\", version=VERSION),\r\n ]\r\n\r\n @property\r\n def manual_download_instructions(self):\r\n return \"\"\"\\\r\n \\n For English:\\nYou need to go to https:\/\/drive.google.com\/drive\/folders\/1g29ssimdZ6JzTST6Y8g6h-ogUNReBtJD?usp=sharing,\\\r\n and manually download the dataset from Google Drive. 
Once it is completed,\r\n a file named Medical-Dialogue-Dataset-English-.zip will appear in your Downloads folder(\r\n or whichever folder your browser chooses to save files to). Unzip the folder to obtain\r\n a folder named \"Medical-Dialogue-Dataset-English\" several text files.\r\n\r\n Now, you can specify the path to this folder for the data_dir argument in the\r\n datasets.load_dataset(...) option.\r\n The can e.g. be \"\/Downloads\/Medical-Dialogue-Dataset-English\".\r\n The data can then be loaded using the below command:\\\r\n datasets.load_dataset(\"medical_dialog\", name=\"en\", data_dir=\"\/Downloads\/Medical-Dialogue-Dataset-English\")`.\r\n\r\n \\n For Chinese:\\nFollow the above process. Change the 'name' to 'zh'.The download link is https:\/\/drive.google.com\/drive\/folders\/1r09_i8nJ9c1nliXVGXwSqRYqklcHd9e2\r\n\r\n **NOTE**\r\n - A caution while downloading from drive. It is better to download single files since creating a zip might not include files <500 MB. This has been observed mutiple times.\r\n - After downloading the files and adding them to the appropriate folder, the path of the folder can be given as input tu the data_dir path.\r\n \"\"\"\r\n\r\n datasets.load_dataset(\"medical_dialog\", name=\"en\", data_dir=\"Medical-Dialogue-Dataset-English\")\r\n\r\n def _info(self):\r\n if self.config.name == \"zh\":\r\n features = datasets.Features(\r\n {\r\n \"file_name\": datasets.Value(\"string\"),\r\n \"dialogue_id\": datasets.Value(\"int32\"),\r\n \"dialogue_url\": datasets.Value(\"string\"),\r\n \"dialogue_turns\": datasets.Sequence(\r\n {\r\n \"speaker\": datasets.ClassLabel(names=[\"\u75c5\u4eba\", \"\u533b\u751f\"]),\r\n \"utterance\": datasets.Value(\"string\"),\r\n }\r\n ),\r\n }\r\n )\r\n\r\n if self.config.name == \"en\":\r\n features = datasets.Features(\r\n {\r\n \"file_name\": datasets.Value(\"string\"),\r\n \"dialogue_id\": datasets.Value(\"int32\"),\r\n \"dialogue_url\": datasets.Value(\"string\"),\r\n \"dialogue_turns\": datasets.Sequence(\r\n {\r\n \"speaker\": datasets.ClassLabel(names=[\"Patient\", \"Doctor\"]),\r\n \"utterance\": datasets.Value(\"string\"),\r\n }\r\n ),\r\n }\r\n )\r\n\r\n return datasets.DatasetInfo(\r\n # This is the description that will appear on the datasets page.\r\n description=_DESCRIPTION,\r\n features=features,\r\n supervised_keys=None,\r\n # Homepage of the dataset for documentation\r\n homepage=_HOMEPAGE,\r\n # License for the dataset if available\r\n license=_LICENSE,\r\n # Citation for the dataset\r\n citation=_CITATION,\r\n )\r\n\r\n def _split_generators(self, dl_manager):\r\n \"\"\"Returns SplitGenerators.\"\"\"\r\n path_to_manual_file = os.path.abspath(os.path.expanduser(dl_manager.manual_dir))\r\n if not os.path.exists(path_to_manual_file):\r\n raise FileNotFoundError(\r\n f\"{path_to_manual_file} does not exist. Make sure you insert a manual dir via `datasets.load_dataset('medical_dialog', data_dir=...)`. Manual download instructions: {self.manual_download_instructions})\"\r\n )\r\n\r\n filepaths = [\r\n os.path.join(path_to_manual_file, txt_file_name)\r\n for txt_file_name in sorted(os.listdir(path_to_manual_file))\r\n if txt_file_name.endswith(\"txt\")\r\n ]\r\n\r\n return [datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={\"filepaths\": filepaths})]\r\n\r\n def _generate_examples(self, filepaths):\r\n \"\"\"Yields examples. 
Iterates over each file and give the creates the corresponding features.\r\n\r\n NOTE:\r\n - The code makes some assumption on the structure of the raw .txt file.\r\n - There are some checks to separate different id's. Hopefully, should not cause further issues later when more txt files are added.\r\n \"\"\"\r\n data_lang = self.config.name\r\n id_ = -1\r\n for filepath in filepaths:\r\n with open(filepath, encoding=\"utf-8\") as f_in:\r\n # Parameters to just \"sectionize\" the raw data\r\n last_part = \"\"\r\n last_dialog = {}\r\n last_list = []\r\n last_user = \"\"\r\n check_list = []\r\n\r\n # These flags are present to have a single function address both chinese and english data\r\n # English data is a little hahazard (i.e. the sentences spans multiple different lines),\r\n # Chinese is compact with one line for doctor and patient.\r\n conv_flag = False\r\n des_flag = False\r\n\r\n while True:\r\n line = f_in.readline()\r\n if not line:\r\n break\r\n\r\n # Extracting the dialog id\r\n if line[:2] == \"id\": # Hardcode alert!\r\n # Handling ID references that may come in the description\r\n # These were observed in the Chinese dataset and were not\r\n # followed by numbers\r\n try:\r\n dialogue_id = int(re.findall(r\"\\d+\", line)[0])\r\n except IndexError:\r\n continue\r\n\r\n # Extracting the url\r\n if line[:4] == \"http\": # Hardcode alert!\r\n dialogue_url = line.rstrip()\r\n\r\n # Extracting the patient info from description.\r\n if line[:11] == \"Description\": # Hardcode alert!\r\n last_part = \"description\"\r\n last_dialog = {}\r\n last_list = []\r\n last_user = \"\"\r\n last_conv = {\"speaker\": \"\", \"utterance\": \"\"}\r\n while True:\r\n line = f_in.readline()\r\n if (not line) or (line in [\"\\n\", \"\\n\\r\"]):\r\n break\r\n else:\r\n if data_lang == \"zh\": # Condition in chinese\r\n if line[:5] == \"\u75c5\u60c5\u63cf\u8ff0\uff1a\": # Hardcode alert!\r\n last_user = \"\u75c5\u4eba\"\r\n sen = f_in.readline().rstrip()\r\n des_flag = True\r\n\r\n if data_lang == \"en\":\r\n last_user = \"Patient\"\r\n sen = line.rstrip()\r\n des_flag = True\r\n\r\n if des_flag:\r\n if sen == \"\":\r\n continue\r\n if sen in check_list:\r\n last_conv[\"speaker\"] = \"\"\r\n last_conv[\"utterance\"] = \"\"\r\n else:\r\n last_conv[\"speaker\"] = last_user\r\n last_conv[\"utterance\"] = sen\r\n check_list.append(sen)\r\n des_flag = False\r\n break\r\n # Extracting the conversation info from dialogue.\r\n elif line[:8] == \"Dialogue\": # Hardcode alert!\r\n if last_part == \"description\" and len(last_conv[\"utterance\"]) > 0:\r\n last_part = \"dialogue\"\r\n if data_lang == \"zh\":\r\n last_user = \"\u75c5\u4eba\"\r\n\r\n if data_lang == \"en\":\r\n last_user = \"Patient\"\r\n\r\n while True:\r\n line = f_in.readline()\r\n if (not line) or (line in [\"\\n\", \"\\n\\r\"]):\r\n conv_flag = False\r\n last_user = \"\"\r\n last_list.append(copy.deepcopy(last_conv))\r\n # To ensure close of conversation, only even number of sentences\r\n # are extracted\r\n last_turn = len(last_list)\r\n if int(last_turn \/ 2) > 0:\r\n temp = int(last_turn \/ 2)\r\n id_ += 1\r\n last_dialog[\"file_name\"] = filepath\r\n last_dialog[\"dialogue_id\"] = dialogue_id\r\n last_dialog[\"dialogue_url\"] = dialogue_url\r\n last_dialog[\"dialogue_turns\"] = last_list[: temp * 2]\r\n yield id_, last_dialog\r\n break\r\n\r\n if data_lang == \"zh\":\r\n if line[:3] == \"\u75c5\u4eba\uff1a\" or line[:3] == \"\u533b\u751f\uff1a\": # Hardcode alert!\r\n user = line[:2] # Hardcode alert!\r\n line = f_in.readline()\r\n conv_flag 
= True\r\n\r\n # The elif block is to ensure that multi-line sentences are captured.\r\n # This has been observed only in english.\r\n if data_lang == \"en\":\r\n if line.strip() == \"Patient:\" or line.strip() == \"Doctor:\": # Hardcode alert!\r\n user = line.replace(\":\", \"\").rstrip()\r\n line = f_in.readline()\r\n conv_flag = True\r\n elif line[:2] != \"id\": # Hardcode alert!\r\n conv_flag = True\r\n\r\n # Continues till the next ID is parsed\r\n if conv_flag:\r\n sen = line.rstrip()\r\n if sen == \"\":\r\n continue\r\n\r\n if user == last_user:\r\n last_conv[\"utterance\"] = last_conv[\"utterance\"] + sen\r\n else:\r\n last_user = user\r\n last_list.append(copy.deepcopy(last_conv))\r\n last_conv[\"utterance\"] = sen\r\n last_conv[\"speaker\"] = user\r\n```\r\n\r\nrunning this code gives me the error:\r\n\r\n```\r\n File \"C:\\Users\\Fabia\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\datasets\\utils\\info_utils.py\", line 74, in verify_splits\r\n raise NonMatchingSplitsSizesError(str(bad_splits))\r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=0, num_examples=0, dataset_name='medical_dialog'), 'recorded': SplitInfo(name='train', num_bytes=292801173, num_examples=229674, dataset_name='medical_dialog')}]\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3568\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3568\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3567","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3567\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3567\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3567\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3567","id":1100296696,"node_id":"PR_kwDODunzps4w2xDl","number":3567,"title":"Fix push to hub to allow individual split 
push","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-12T12:42:58Z","updated_at":"2022-01-12T13:29:01Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3567","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3567","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3567.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3567.patch","merged_at":null},"body":"# Description of the issue\r\n\r\nIf one decides to push a split on a datasets repo, he uploads the dataset and overrides the config. However previous config splits end up being lost despite still having the dataset necessary.\r\n\r\nThe new flow is the following:\r\n - query the old config from the repo\r\n - update into a new config (add\/overwrite new split for example)\r\n - push the new config\r\n\r\n# Side fix\r\n\r\n - `repo_id` in HfFileSystem was wrongly typed.\r\n - I've added `indent=2` as it becomes much easier to read now.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3567\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3567\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3566","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3566\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3566\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3566\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3566","id":1100155902,"node_id":"PR_kwDODunzps4w2Tcc","number":3566,"title":"Add initial electricity time series 
dataset","user":{"login":"kashif","id":8100,"node_id":"MDQ6VXNlcjgxMDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8100?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kashif","html_url":"https:\/\/github.com\/kashif","followers_url":"https:\/\/api.github.com\/users\/kashif\/followers","following_url":"https:\/\/api.github.com\/users\/kashif\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kashif\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kashif\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kashif\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kashif\/orgs","repos_url":"https:\/\/api.github.com\/users\/kashif\/repos","events_url":"https:\/\/api.github.com\/users\/kashif\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kashif\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-12T10:21:32Z","updated_at":"2022-02-11T12:02:39Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3566","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3566","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3566.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3566.patch","merged_at":null},"body":"Here is an initial prototype time series dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3566\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3566\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3565","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3565\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3565\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3565\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3565","id":1099296693,"node_id":"PR_kwDODunzps4wzjhH","number":3565,"title":"Add parameter `preserve_index` to 
`from_pandas`","user":{"login":"Sorrow321","id":20703486,"node_id":"MDQ6VXNlcjIwNzAzNDg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20703486?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Sorrow321","html_url":"https:\/\/github.com\/Sorrow321","followers_url":"https:\/\/api.github.com\/users\/Sorrow321\/followers","following_url":"https:\/\/api.github.com\/users\/Sorrow321\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Sorrow321\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Sorrow321\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Sorrow321\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Sorrow321\/orgs","repos_url":"https:\/\/api.github.com\/users\/Sorrow321\/repos","events_url":"https:\/\/api.github.com\/users\/Sorrow321\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Sorrow321\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-11T15:26:37Z","updated_at":"2022-01-12T16:11:27Z","closed_at":"2022-01-12T16:11:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3565","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3565","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3565.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3565.patch","merged_at":"2022-01-12T16:11:26Z"},"body":"Added optional parameter, so that user can get rid of useless index preserving. [Issue](https:\/\/github.com\/huggingface\/datasets\/issues\/3563)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3565\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3565\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3564","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3564\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3564\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3564\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3564","id":1099214403,"node_id":"PR_kwDODunzps4wzSOL","number":3564,"title":" Add the KMWP & DKTC 
dataset.","user":{"login":"sooftware","id":42150335,"node_id":"MDQ6VXNlcjQyMTUwMzM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42150335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sooftware","html_url":"https:\/\/github.com\/sooftware","followers_url":"https:\/\/api.github.com\/users\/sooftware\/followers","following_url":"https:\/\/api.github.com\/users\/sooftware\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sooftware\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sooftware\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sooftware\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sooftware\/orgs","repos_url":"https:\/\/api.github.com\/users\/sooftware\/repos","events_url":"https:\/\/api.github.com\/users\/sooftware\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sooftware\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2022-01-11T14:14:08Z","updated_at":"2022-01-12T15:33:49Z","closed_at":"2022-01-12T15:33:28Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3564","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3564","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3564.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3564.patch","merged_at":null},"body":" Add the DKTC dataset.\r\n- https:\/\/github.com\/tunib-ai\/DKTC","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3564\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3564\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3563","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3563\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3563\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3563\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3563","id":1099070368,"node_id":"I_kwDODunzps5Bgnug","number":3563,"title":"Dataset.from_pandas preserves useless 
index","user":{"login":"Sorrow321","id":20703486,"node_id":"MDQ6VXNlcjIwNzAzNDg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20703486?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Sorrow321","html_url":"https:\/\/github.com\/Sorrow321","followers_url":"https:\/\/api.github.com\/users\/Sorrow321\/followers","following_url":"https:\/\/api.github.com\/users\/Sorrow321\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Sorrow321\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Sorrow321\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Sorrow321\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Sorrow321\/orgs","repos_url":"https:\/\/api.github.com\/users\/Sorrow321\/repos","events_url":"https:\/\/api.github.com\/users\/Sorrow321\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Sorrow321\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-11T12:07:07Z","updated_at":"2022-01-12T16:11:27Z","closed_at":"2022-01-12T16:11:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nLet's say that you want to create a Dataset object from pandas dataframe. Most likely you will write something like this:\r\n\r\n```\r\nimport pandas as pd\r\nfrom datasets import Dataset\r\n\r\n\r\ndf = pd.read_csv('some_dataset.csv')\r\n# Some DataFrame preprocessing code...\r\ndataset = Dataset.from_pandas(df)\r\n```\r\nIf your preprocessing code contain indexing operations like this:\r\n```\r\ndf = df[df.col1 == some_value]\r\n```\r\nthen your df.index can be changed from (default) ```RangeIndex(start=0, stop=16590, step=1)``` to something like this ```Int64Index([ 0, 1, 2, 3, 4, 5, 6, 7, 8,\r\n 9,\r\n ...\r\n 83979, 83980, 83981, 83982, 83983, 83984, 83985, 83986, 83987,\r\n 83988],\r\n dtype='int64', length=16590)```\r\n\r\nIn this case, PyArrow (by default) will preserve this non-standard index. 
In the result, your dataset object will have the extra field that you likely don't want to have: '__index_level_0__'.\r\n\r\nYou can easily fix this by just adding extra argument ```preserve_index=False``` to call of ```InMemoryTable.from_pandas``` in ```arrow_dataset.py```.\r\n\r\nIf you approve that this isn't desirable behavior, I can make a PR fixing that.\r\n\r\n## Environment info\r\n- `datasets` version: 1.16.1\r\n- Platform: Linux-5.11.0-44-generic-x86_64-with-glibc2.31\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.1\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3563\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3563\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3562","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3562\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3562\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3562\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3562","id":1098341351,"node_id":"PR_kwDODunzps4wwa44","number":3562,"title":"Allow multiple task templates of the same type","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-10T20:32:07Z","updated_at":"2022-01-11T14:16:47Z","closed_at":"2022-01-11T14:16:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3562","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3562","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3562.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3562.patch","merged_at":"2022-01-11T14:16:46Z"},"body":"Add support for multiple task templates of the same type. 
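For illustration, a minimal sketch of what this enables; the `QuestionAnsweringExtractive` column names below are hypothetical and not taken from a real dataset script:

```python
from datasets import DatasetInfo
from datasets.tasks import QuestionAnsweringExtractive

# Two templates of the same task type, pointing at different column pairs,
# can now live side by side in a single DatasetInfo.
info = DatasetInfo(
    description="toy example",
    task_templates=[
        QuestionAnsweringExtractive(
            question_column="question_1", context_column="context", answers_column="answers_1"
        ),
        QuestionAnsweringExtractive(
            question_column="question_2", context_column="context", answers_column="answers_2"
        ),
    ],
)
```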
Fixes (partially) #2520.\r\n\r\nCC: @lewtun ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3562\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3562\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3561","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3561\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3561\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3561\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3561","id":1098328870,"node_id":"I_kwDODunzps5Bdysm","number":3561,"title":"Cannot load \u2018bookcorpusopen\u2019","user":{"login":"HUIYINXUE","id":54684403,"node_id":"MDQ6VXNlcjU0Njg0NDAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/54684403?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/HUIYINXUE","html_url":"https:\/\/github.com\/HUIYINXUE","followers_url":"https:\/\/api.github.com\/users\/HUIYINXUE\/followers","following_url":"https:\/\/api.github.com\/users\/HUIYINXUE\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/HUIYINXUE\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/HUIYINXUE\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/HUIYINXUE\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/HUIYINXUE\/orgs","repos_url":"https:\/\/api.github.com\/users\/HUIYINXUE\/repos","events_url":"https:\/\/api.github.com\/users\/HUIYINXUE\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/HUIYINXUE\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-10T20:17:18Z","updated_at":"2022-01-20T17:23:50Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nCannot load 'bookcorpusopen'\r\n\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndataset = load_dataset('bookcorpusopen')\r\n```\r\nor\r\n```python\r\ndataset = load_dataset('bookcorpusopen',script_version='master')\r\n```\r\n\r\n## Actual results\r\nConnectionError: Couldn't reach https:\/\/the-eye.eu\/public\/AI\/pile_preliminary_components\/books1.tar.gz\r\n\r\n## Environment info\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux version 3.10.0-1160.45.1.el7.x86_64\r\n- Python version: 3.6.13\r\n- PyArrow version: 
6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3561\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3561\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3560","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3560\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3560\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3560\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3560","id":1098280652,"node_id":"PR_kwDODunzps4wwOMf","number":3560,"title":"Run pyupgrade for Python 3.6+","user":{"login":"bryant1410","id":3905501,"node_id":"MDQ6VXNlcjM5MDU1MDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3905501?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bryant1410","html_url":"https:\/\/github.com\/bryant1410","followers_url":"https:\/\/api.github.com\/users\/bryant1410\/followers","following_url":"https:\/\/api.github.com\/users\/bryant1410\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bryant1410\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bryant1410\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bryant1410\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bryant1410\/orgs","repos_url":"https:\/\/api.github.com\/users\/bryant1410\/repos","events_url":"https:\/\/api.github.com\/users\/bryant1410\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bryant1410\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2022-01-10T19:20:53Z","updated_at":"2022-01-31T13:38:49Z","closed_at":"2022-01-31T09:37:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3560","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3560","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3560.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3560.patch","merged_at":"2022-01-31T09:37:34Z"},"body":"Run the command:\r\n\r\n```bash\r\npyupgrade $(find . 
-name \"*.py\" -type f) --py36-plus\r\n```\r\n\r\nWhich mainly avoids unnecessary lists creations and also removes unnecessary code for Python 3.6+.\r\n\r\nIt was originally part of #3489.\r\n\r\nTip for reviewing faster: use the CLI (`git diff`) and scroll.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3560\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3560\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3559","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3559\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3559\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3559\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3559","id":1098178222,"node_id":"PR_kwDODunzps4wv420","number":3559,"title":"Fix `DuplicatedKeysError` and improve card in `tweet_qa`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-10T17:27:40Z","updated_at":"2022-01-12T15:13:58Z","closed_at":"2022-01-12T15:13:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3559","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3559","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3559.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3559.patch","merged_at":"2022-01-12T15:13:56Z"},"body":"Fix #3555 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3559\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3559\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3558","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3558\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3558\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3558\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3558","id":1098025866,"node_id":"I_kwDODunzps5BcouK","number":3558,"title":"Integrate Milvus (pymilvus) library","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-10T15:20:29Z","updated_at":"2022-01-10T15:20:29Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Milvus is a popular open-source vector database. 
We should add a new vector index to support this project.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3558\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3558\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3557","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3557\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3557\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3557\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3557","id":1097946034,"node_id":"PR_kwDODunzps4wvIHl","number":3557,"title":"Fix bug in `ImageClassifcation` task template","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2022-01-10T14:09:59Z","updated_at":"2022-01-11T15:47:52Z","closed_at":"2022-01-11T15:47:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3557","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3557","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3557.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3557.patch","merged_at":"2022-01-11T15:47:52Z"},"body":"Fixes a bug in the `ImageClassification` task template which requires specifying class labels twice in dataset scripts. 
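As a hedged sketch of what the fix means for dataset scripts (column names hypothetical): the class names now live only in the script's `ClassLabel` feature, and the template just references the relevant columns:

```python
from datasets.tasks import ImageClassification

# No second `labels=[...]` argument on the template; the ClassLabel feature
# declared in the dataset's Features is the single source of truth.
task_templates = [ImageClassification(image_column="image", label_column="label")]
```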
Additionally, this PR refactors the API around the classification task templates for cleaner `labels` handling.\r\n\r\nCC: @lewtun @nateraw","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3557\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3557\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3556","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3556\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3556\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3556\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3556","id":1097907724,"node_id":"PR_kwDODunzps4wvALx","number":3556,"title":"Preserve encoding\/decoding with features in `Iterable.map` call","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-10T13:32:20Z","updated_at":"2022-01-18T19:54:08Z","closed_at":"2022-01-18T19:54:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3556","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3556","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3556.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3556.patch","merged_at":"2022-01-18T19:54:07Z"},"body":"As described in https:\/\/github.com\/huggingface\/datasets\/issues\/3505#issuecomment-1004755657, this PR uses a generator expression to encode\/decode examples with `features` (which are set to None in `map`) before applying a map transform.\r\n\r\nFix #3505 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3556\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3556\/timeline","performed_via_github_app":null} 
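To make the generator-expression idea from #3556 concrete, a minimal standalone sketch (illustrative only, not the library's internal implementation):

```python
from datasets import ClassLabel, Features, Value

features = Features({"text": Value("string"), "label": ClassLabel(names=["neg", "pos"])})
raw_examples = [{"text": "great movie", "label": "pos"}, {"text": "boring", "label": "neg"}]

def map_fn(example):
    return {**example, "text_len": len(example["text"])}

# Lazily encode each example with the known features before the map transform runs,
# so the ClassLabel strings become integer ids just as they would in a regular Dataset.
encoded = (features.encode_example(ex) for ex in raw_examples)
mapped = (map_fn(ex) for ex in encoded)
print(list(mapped))  # [{'text': 'great movie', 'label': 1, 'text_len': 11}, ...]
```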
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3555","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3555\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3555\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3555\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3555","id":1097736982,"node_id":"I_kwDODunzps5BbiMW","number":3555,"title":"DuplicatedKeysError when loading tweet_qa dataset","user":{"login":"LeonieWeissweiler","id":30300891,"node_id":"MDQ6VXNlcjMwMzAwODkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30300891?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LeonieWeissweiler","html_url":"https:\/\/github.com\/LeonieWeissweiler","followers_url":"https:\/\/api.github.com\/users\/LeonieWeissweiler\/followers","following_url":"https:\/\/api.github.com\/users\/LeonieWeissweiler\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LeonieWeissweiler\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LeonieWeissweiler\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LeonieWeissweiler\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LeonieWeissweiler\/orgs","repos_url":"https:\/\/api.github.com\/users\/LeonieWeissweiler\/repos","events_url":"https:\/\/api.github.com\/users\/LeonieWeissweiler\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LeonieWeissweiler\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-10T10:53:11Z","updated_at":"2022-01-12T15:17:33Z","closed_at":"2022-01-12T15:13:56Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When loading the tweet_qa dataset with `load_dataset('tweet_qa')`, the following error occurs: \r\n\r\n`DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 2a167f9e016ba338e1813fed275a6a1e\r\nKeys should be unique and deterministic in nature\r\n`\r\nMight be related to issues #2433 and #2333\r\n\r\n- `datasets` version: 1.17.0\r\n- Python version: 3.8.5\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3555\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3555\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3554","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3554\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3554\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3554\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3554","id":1097711367,"node_id":"I_kwDODunzps5Bbb8H","number":3554,"title":"ImportError: cannot import name 'is_valid_waiter_error'","user":{"login":"danielbellhv","id":84714841,"node_id":"MDQ6VXNlcjg0NzE0ODQx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/84714841?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danielbellhv","html_url":"https:\/\/github.com\/danielbellhv","followers_url":"https:\/\/api.github.com\/users\/danielbellhv\/followers","following_url":"https:\/\/api.github.com\/users\/danielbellhv\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danielbellhv\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danielbellhv\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danielbellhv\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danielbellhv\/orgs","repos_url":"https:\/\/api.github.com\/users\/danielbellhv\/repos","events_url":"https:\/\/api.github.com\/users\/danielbellhv\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danielbellhv\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2022-01-10T10:32:04Z","updated_at":"2022-01-21T09:30:24Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Based on [SO post](https:\/\/stackoverflow.com\/q\/70606147\/17840900).\r\n\r\nI'm following along to this [Notebook][1], cell \"**Loading the dataset**\".\r\n\r\nKernel: `conda_pytorch_p36`.\r\n\r\nI run:\r\n```\r\n! 
pip install datasets transformers optimum[intel]\r\n```\r\n\r\nOutput:\r\n```\r\nRequirement already satisfied: datasets in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (1.17.0)\r\nRequirement already satisfied: transformers in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (4.15.0)\r\nRequirement already satisfied: optimum[intel] in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (0.1.3)\r\nRequirement already satisfied: numpy>=1.17 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (1.19.5)\r\nRequirement already satisfied: dill in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (0.3.4)\r\nRequirement already satisfied: tqdm>=4.62.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (4.62.3)\r\nRequirement already satisfied: huggingface-hub<1.0.0,>=0.1.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (0.2.1)\r\nRequirement already satisfied: packaging in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (21.3)\r\nRequirement already satisfied: pyarrow!=4.0.0,>=3.0.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (6.0.1)\r\nRequirement already satisfied: pandas in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (1.1.5)\r\nRequirement already satisfied: xxhash in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (2.0.2)\r\nRequirement already satisfied: aiohttp in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (3.8.1)\r\nRequirement already satisfied: fsspec[http]>=2021.05.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (2021.11.1)\r\nRequirement already satisfied: dataclasses in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (0.8)\r\nRequirement already satisfied: multiprocess in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (0.70.12.2)\r\nRequirement already satisfied: importlib-metadata in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (4.5.0)\r\nRequirement already satisfied: requests>=2.19.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from datasets) (2.25.1)\r\nRequirement already satisfied: pyyaml>=5.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from transformers) (5.4.1)\r\nRequirement already satisfied: regex!=2019.12.17 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from transformers) (2021.4.4)\r\nRequirement already satisfied: tokenizers<0.11,>=0.10.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from transformers) (0.10.3)\r\nRequirement already satisfied: filelock in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from transformers) (3.0.12)\r\nRequirement already satisfied: sacremoses in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from transformers) (0.0.46)\r\nRequirement already satisfied: torch>=1.9 in 
\/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from optimum[intel]) (1.10.1)\r\nRequirement already satisfied: sympy in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from optimum[intel]) (1.8)\r\nRequirement already satisfied: coloredlogs in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from optimum[intel]) (15.0.1)\r\nRequirement already satisfied: pycocotools in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from optimum[intel]) (2.0.3)\r\nRequirement already satisfied: neural-compressor>=1.7 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from optimum[intel]) (1.9)\r\nRequirement already satisfied: typing-extensions>=3.7.4.3 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from huggingface-hub<1.0.0,>=0.1.0->datasets) (3.10.0.0)\r\nRequirement already satisfied: sigopt in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (8.2.0)\r\nRequirement already satisfied: opencv-python in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (4.5.1.48)\r\nRequirement already satisfied: cryptography in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (3.4.7)\r\nRequirement already satisfied: py-cpuinfo in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (8.0.0)\r\nRequirement already satisfied: gevent in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (21.1.2)\r\nRequirement already satisfied: schema in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (0.7.5)\r\nRequirement already satisfied: psutil in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (5.8.0)\r\nRequirement already satisfied: gevent-websocket in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (0.10.1)\r\nRequirement already satisfied: hyperopt in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (0.2.7)\r\nRequirement already satisfied: Flask in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (2.0.1)\r\nRequirement already satisfied: prettytable in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (2.5.0)\r\nRequirement already satisfied: Flask-SocketIO in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (5.1.1)\r\nRequirement already satisfied: scikit-learn in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (0.24.2)\r\nRequirement already satisfied: Pillow in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (8.4.0)\r\nRequirement already satisfied: Flask-Cors in 
\/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from neural-compressor>=1.7->optimum[intel]) (3.0.10)\r\nRequirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from packaging->datasets) (2.4.7)\r\nRequirement already satisfied: chardet<5,>=3.0.2 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from requests>=2.19.0->datasets) (4.0.0)\r\nRequirement already satisfied: certifi>=2017.4.17 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from requests>=2.19.0->datasets) (2021.5.30)\r\nRequirement already satisfied: urllib3<1.27,>=1.21.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from requests>=2.19.0->datasets) (1.26.5)\r\nRequirement already satisfied: idna<3,>=2.5 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from requests>=2.19.0->datasets) (2.10)\r\nRequirement already satisfied: yarl<2.0,>=1.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from aiohttp->datasets) (1.6.3)\r\nRequirement already satisfied: charset-normalizer<3.0,>=2.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from aiohttp->datasets) (2.0.9)\r\nRequirement already satisfied: attrs>=17.3.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from aiohttp->datasets) (21.2.0)\r\nRequirement already satisfied: asynctest==0.13.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from aiohttp->datasets) (0.13.0)\r\nRequirement already satisfied: idna-ssl>=1.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from aiohttp->datasets) (1.1.0)\r\nRequirement already satisfied: async-timeout<5.0,>=4.0.0a3 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from aiohttp->datasets) (4.0.1)\r\nRequirement already satisfied: aiosignal>=1.1.2 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from aiohttp->datasets) (1.2.0)\r\nRequirement already satisfied: frozenlist>=1.1.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from aiohttp->datasets) (1.2.0)\r\nRequirement already satisfied: multidict<7.0,>=4.5 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from aiohttp->datasets) (5.1.0)\r\nRequirement already satisfied: humanfriendly>=9.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from coloredlogs->optimum[intel]) (10.0)\r\nRequirement already satisfied: zipp>=0.5 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from importlib-metadata->datasets) (3.4.1)\r\nRequirement already satisfied: python-dateutil>=2.7.3 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from pandas->datasets) (2.8.1)\r\nRequirement already satisfied: pytz>=2017.2 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from pandas->datasets) (2021.1)\r\nRequirement already satisfied: matplotlib>=2.1.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from pycocotools->optimum[intel]) (3.3.4)\r\nRequirement already satisfied: cython>=0.27.3 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from pycocotools->optimum[intel]) 
(0.29.23)\r\nRequirement already satisfied: setuptools>=18.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from pycocotools->optimum[intel]) (52.0.0.post20210125)\r\nRequirement already satisfied: joblib in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sacremoses->transformers) (1.0.1)\r\nRequirement already satisfied: click in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sacremoses->transformers) (8.0.1)\r\nRequirement already satisfied: six in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sacremoses->transformers) (1.16.0)\r\nRequirement already satisfied: mpmath>=0.19 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sympy->optimum[intel]) (1.2.1)\r\nRequirement already satisfied: kiwisolver>=1.0.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from matplotlib>=2.1.0->pycocotools->optimum[intel]) (1.3.1)\r\nRequirement already satisfied: cycler>=0.10 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/cycler-0.10.0-py3.6.egg (from matplotlib>=2.1.0->pycocotools->optimum[intel]) (0.10.0)\r\nRequirement already satisfied: cffi>=1.12 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from cryptography->neural-compressor>=1.7->optimum[intel]) (1.14.5)\r\nRequirement already satisfied: Werkzeug>=2.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from Flask->neural-compressor>=1.7->optimum[intel]) (2.0.2)\r\nRequirement already satisfied: Jinja2>=3.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from Flask->neural-compressor>=1.7->optimum[intel]) (3.0.1)\r\nRequirement already satisfied: itsdangerous>=2.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from Flask->neural-compressor>=1.7->optimum[intel]) (2.0.1)\r\nRequirement already satisfied: python-socketio>=5.0.2 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from Flask-SocketIO->neural-compressor>=1.7->optimum[intel]) (5.5.0)\r\nRequirement already satisfied: zope.event in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from gevent->neural-compressor>=1.7->optimum[intel]) (4.5.0)\r\nRequirement already satisfied: greenlet<2.0,>=0.4.17 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from gevent->neural-compressor>=1.7->optimum[intel]) (1.1.0)\r\nRequirement already satisfied: zope.interface in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from gevent->neural-compressor>=1.7->optimum[intel]) (5.4.0)\r\nRequirement already satisfied: future in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from hyperopt->neural-compressor>=1.7->optimum[intel]) (0.18.2)\r\nRequirement already satisfied: cloudpickle in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from hyperopt->neural-compressor>=1.7->optimum[intel]) (1.6.0)\r\nRequirement already satisfied: networkx>=2.2 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from hyperopt->neural-compressor>=1.7->optimum[intel]) (2.5)\r\nRequirement already satisfied: scipy in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from 
hyperopt->neural-compressor>=1.7->optimum[intel]) (1.5.3)\r\nRequirement already satisfied: py4j in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from hyperopt->neural-compressor>=1.7->optimum[intel]) (0.10.7)\r\nRequirement already satisfied: wcwidth in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from prettytable->neural-compressor>=1.7->optimum[intel]) (0.2.5)\r\nRequirement already satisfied: contextlib2>=0.5.5 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from schema->neural-compressor>=1.7->optimum[intel]) (0.6.0.post1)\r\nRequirement already satisfied: threadpoolctl>=2.0.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from scikit-learn->neural-compressor>=1.7->optimum[intel]) (2.1.0)\r\nRequirement already satisfied: pyOpenSSL>=20.0.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sigopt->neural-compressor>=1.7->optimum[intel]) (20.0.1)\r\nRequirement already satisfied: pypng>=0.0.20 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sigopt->neural-compressor>=1.7->optimum[intel]) (0.0.21)\r\nRequirement already satisfied: kubernetes<13.0.0,>=12.0.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sigopt->neural-compressor>=1.7->optimum[intel]) (12.0.1)\r\nRequirement already satisfied: rsa<5.0.0,>=4.7 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sigopt->neural-compressor>=1.7->optimum[intel]) (4.7.2)\r\nRequirement already satisfied: boto3<2.0.0,==1.16.34 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sigopt->neural-compressor>=1.7->optimum[intel]) (1.16.34)\r\nRequirement already satisfied: Pint<0.17.0,>=0.16.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sigopt->neural-compressor>=1.7->optimum[intel]) (0.16.1)\r\nRequirement already satisfied: GitPython>=2.0.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sigopt->neural-compressor>=1.7->optimum[intel]) (3.1.18)\r\nRequirement already satisfied: backoff<2.0.0,>=1.10.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sigopt->neural-compressor>=1.7->optimum[intel]) (1.11.1)\r\nRequirement already satisfied: ipython>=5.0.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sigopt->neural-compressor>=1.7->optimum[intel]) (7.16.1)\r\nRequirement already satisfied: docker<5.0.0,>=4.4.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from sigopt->neural-compressor>=1.7->optimum[intel]) (4.4.4)\r\nRequirement already satisfied: jmespath<1.0.0,>=0.7.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from boto3<2.0.0,==1.16.34->sigopt->neural-compressor>=1.7->optimum[intel]) (0.10.0)\r\nRequirement already satisfied: s3transfer<0.4.0,>=0.3.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from boto3<2.0.0,==1.16.34->sigopt->neural-compressor>=1.7->optimum[intel]) (0.3.7)\r\nRequirement already satisfied: botocore<1.20.0,>=1.19.34 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from boto3<2.0.0,==1.16.34->sigopt->neural-compressor>=1.7->optimum[intel]) (1.19.63)\r\nRequirement already satisfied: pycparser in 
\/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from cffi>=1.12->cryptography->neural-compressor>=1.7->optimum[intel]) (2.20)\r\nRequirement already satisfied: websocket-client>=0.32.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from docker<5.0.0,>=4.4.0->sigopt->neural-compressor>=1.7->optimum[intel]) (0.58.0)\r\nRequirement already satisfied: gitdb<5,>=4.0.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from GitPython>=2.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (4.0.9)\r\nRequirement already satisfied: traitlets>=4.2 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from ipython>=5.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (4.3.3)\r\nRequirement already satisfied: jedi>=0.10 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from ipython>=5.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (0.17.2)\r\nRequirement already satisfied: prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from ipython>=5.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (3.0.19)\r\nRequirement already satisfied: backcall in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from ipython>=5.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (0.2.0)\r\nRequirement already satisfied: pygments in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from ipython>=5.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (2.9.0)\r\nRequirement already satisfied: pexpect in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from ipython>=5.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (4.8.0)\r\nRequirement already satisfied: decorator in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from ipython>=5.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (5.0.9)\r\nRequirement already satisfied: pickleshare in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from ipython>=5.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (0.7.5)\r\nRequirement already satisfied: MarkupSafe>=2.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from Jinja2>=3.0->Flask->neural-compressor>=1.7->optimum[intel]) (2.0.1)\r\nRequirement already satisfied: google-auth>=1.0.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from kubernetes<13.0.0,>=12.0.1->sigopt->neural-compressor>=1.7->optimum[intel]) (1.30.2)\r\nRequirement already satisfied: requests-oauthlib in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from kubernetes<13.0.0,>=12.0.1->sigopt->neural-compressor>=1.7->optimum[intel]) (1.3.0)\r\nRequirement already satisfied: importlib-resources in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from Pint<0.17.0,>=0.16.0->sigopt->neural-compressor>=1.7->optimum[intel]) (5.4.0)\r\nRequirement already satisfied: python-engineio>=4.3.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from python-socketio>=5.0.2->Flask-SocketIO->neural-compressor>=1.7->optimum[intel]) (4.3.0)\r\nRequirement already satisfied: bidict>=0.21.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from 
python-socketio>=5.0.2->Flask-SocketIO->neural-compressor>=1.7->optimum[intel]) (0.21.4)\r\nRequirement already satisfied: pyasn1>=0.1.3 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from rsa<5.0.0,>=4.7->sigopt->neural-compressor>=1.7->optimum[intel]) (0.4.8)\r\nRequirement already satisfied: smmap<6,>=3.0.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from gitdb<5,>=4.0.1->GitPython>=2.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (5.0.0)\r\nRequirement already satisfied: pyasn1-modules>=0.2.1 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from google-auth>=1.0.1->kubernetes<13.0.0,>=12.0.1->sigopt->neural-compressor>=1.7->optimum[intel]) (0.2.8)\r\nRequirement already satisfied: cachetools<5.0,>=2.0.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from google-auth>=1.0.1->kubernetes<13.0.0,>=12.0.1->sigopt->neural-compressor>=1.7->optimum[intel]) (4.2.2)\r\nRequirement already satisfied: parso<0.8.0,>=0.7.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from jedi>=0.10->ipython>=5.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (0.7.1)\r\nRequirement already satisfied: ipython-genutils in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from traitlets>=4.2->ipython>=5.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (0.2.0)\r\nRequirement already satisfied: ptyprocess>=0.5 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from pexpect->ipython>=5.0.0->sigopt->neural-compressor>=1.7->optimum[intel]) (0.7.0)\r\nRequirement already satisfied: oauthlib>=3.0.0 in \/home\/ec2-user\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages (from requests-oauthlib->kubernetes<13.0.0,>=12.0.1->sigopt->neural-compressor>=1.7->optimum[intel]) (3.1.1)\r\n```\r\n\r\n---\r\n\r\n**Cell:**\r\n```python\r\nfrom datasets import load_dataset, load_metric\r\n```\r\nOR\r\n```python\r\nimport datasets\r\n```\r\n\r\n**Traceback:**\r\n```\r\n---------------------------------------------------------------------------\r\nImportError Traceback (most recent call last)\r\n in \r\n----> 1 from datasets import load_dataset, load_metric\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/datasets\/__init__.py in \r\n 32 )\r\n 33 \r\n---> 34 from .arrow_dataset import Dataset, concatenate_datasets\r\n 35 from .arrow_reader import ArrowReader, ReadInstruction\r\n 36 from .arrow_writer import ArrowWriter\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py in \r\n 59 from . import config, utils\r\n 60 from .arrow_reader import ArrowReader\r\n---> 61 from .arrow_writer import ArrowWriter, OptimizedTypedSequence\r\n 62 from .features import ClassLabel, Features, FeatureType, Sequence, Value, _ArrayXD, pandas_types_mapper\r\n 63 from .filesystems import extract_path_from_uri, is_remote_filesystem\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/datasets\/arrow_writer.py in \r\n 26 \r\n 27 from . 
import config, utils\r\n---> 28 from .features import (\r\n 29 Features,\r\n 30 ImageExtensionType,\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/datasets\/features\/__init__.py in \r\n 1 # flake8: noqa\r\n----> 2 from .audio import Audio\r\n 3 from .features import *\r\n 4 from .features import (\r\n 5 _ArrayXD,\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/datasets\/features\/audio.py in \r\n 5 import pyarrow as pa\r\n 6 \r\n----> 7 from ..utils.streaming_download_manager import xopen\r\n 8 \r\n 9 \r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/datasets\/utils\/streaming_download_manager.py in \r\n 16 \r\n 17 from .. import config\r\n---> 18 from ..filesystems import COMPRESSION_FILESYSTEMS\r\n 19 from .download_manager import DownloadConfig, map_nested\r\n 20 from .file_utils import (\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/datasets\/filesystems\/__init__.py in \r\n 11 \r\n 12 if _has_s3fs:\r\n---> 13 from .s3filesystem import S3FileSystem # noqa: F401\r\n 14 \r\n 15 COMPRESSION_FILESYSTEMS: List[compression.BaseCompressedFileFileSystem] = [\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/datasets\/filesystems\/s3filesystem.py in \r\n----> 1 import s3fs\r\n 2 \r\n 3 \r\n 4 class S3FileSystem(s3fs.S3FileSystem):\r\n 5 \"\"\"\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/s3fs\/__init__.py in \r\n----> 1 from .core import S3FileSystem, S3File\r\n 2 from .mapping import S3Map\r\n 3 \r\n 4 from ._version import get_versions\r\n 5 \r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/s3fs\/core.py in \r\n 12 from fsspec.asyn import AsyncFileSystem, sync, sync_wrapper\r\n 13 \r\n---> 14 import aiobotocore\r\n 15 import botocore\r\n 16 import aiobotocore.session\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/aiobotocore\/__init__.py in \r\n----> 1 from .session import get_session, AioSession\r\n 2 \r\n 3 __all__ = ['get_session', 'AioSession']\r\n 4 __version__ = '1.3.0'\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/aiobotocore\/session.py in \r\n 4 from botocore import retryhandler, translate\r\n 5 from botocore.exceptions import PartialCredentialsError\r\n----> 6 from .client import AioClientCreator, AioBaseClient\r\n 7 from .hooks import AioHierarchicalEmitter\r\n 8 from .parsers import AioResponseParserFactory\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/aiobotocore\/client.py in \r\n 11 from .args import AioClientArgsCreator\r\n 12 from .utils import AioS3RegionRedirector\r\n---> 13 from . 
import waiter\r\n 14 \r\n 15 history_recorder = get_global_history_recorder()\r\n\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/aiobotocore\/waiter.py in \r\n 4 from botocore.exceptions import ClientError\r\n 5 from botocore.waiter import WaiterModel # noqa: F401, lgtm[py\/unused-import]\r\n----> 6 from botocore.waiter import Waiter, xform_name, logger, WaiterError, \\\r\n 7 NormalizedOperationMethod as _NormalizedOperationMethod, is_valid_waiter_error\r\n 8 from botocore.docs.docstring import WaiterDocstring\r\n\r\nImportError: cannot import name 'is_valid_waiter_error'\r\n```\r\n\r\nPlease let me know if there's anything else I can add to post.\r\n\r\n [1]: https:\/\/github.com\/huggingface\/notebooks\/blob\/master\/examples\/text_classification_quantization_inc.ipynb","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3554\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3554\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3553","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3553\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3553\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3553\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3553","id":1097252275,"node_id":"I_kwDODunzps5BZr2z","number":3553,"title":"set_format(\"np\") no longer works for Image data","user":{"login":"cgarciae","id":5862228,"node_id":"MDQ6VXNlcjU4NjIyMjg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5862228?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cgarciae","html_url":"https:\/\/github.com\/cgarciae","followers_url":"https:\/\/api.github.com\/users\/cgarciae\/followers","following_url":"https:\/\/api.github.com\/users\/cgarciae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cgarciae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cgarciae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cgarciae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cgarciae\/orgs","repos_url":"https:\/\/api.github.com\/users\/cgarciae\/repos","events_url":"https:\/\/api.github.com\/users\/cgarciae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cgarciae\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2022-01-09T17:18:13Z","updated_at":"2022-01-13T13:39:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n`dataset.set_format(\"np\")` no longer works for image data, previously you could load the MNIST like this:\r\n\r\n```python\r\ndataset = load_dataset(\"mnist\")\r\ndataset.set_format(\"np\")\r\nX_train = dataset[\"train\"][\"image\"][..., None] # <== No longer a numpy array\r\n```\r\nbut now 
it doesn't work, `set_format(\"np\")` seems to have no effect and the dataset just returns a list\/array of PIL images instead of numpy arrays as requested.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3553\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3553\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3552","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3552\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3552\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3552\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3552","id":1096985204,"node_id":"PR_kwDODunzps4wsM29","number":3552,"title":"Add the KMWP & DKTC dataset.","user":{"login":"sooftware","id":42150335,"node_id":"MDQ6VXNlcjQyMTUwMzM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42150335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sooftware","html_url":"https:\/\/github.com\/sooftware","followers_url":"https:\/\/api.github.com\/users\/sooftware\/followers","following_url":"https:\/\/api.github.com\/users\/sooftware\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sooftware\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sooftware\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sooftware\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sooftware\/orgs","repos_url":"https:\/\/api.github.com\/users\/sooftware\/repos","events_url":"https:\/\/api.github.com\/users\/sooftware\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sooftware\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-08T17:12:14Z","updated_at":"2022-01-11T14:13:30Z","closed_at":"2022-01-11T14:13:30Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3552","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3552","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3552.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3552.patch","merged_at":null},"body":"Add the KMWP & DKTC dataset.\r\n \r\nAdditional notes:\r\n- Both datasets will be released on January 10 through the GitHub link below.\r\n - https:\/\/github.com\/tunib-ai\/DKTC\r\n - https:\/\/github.com\/tunib-ai\/KMWP\r\n- So it doesn't work as a link at the moment, but the code will work soon (after it is released on January 10).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3552\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3552\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3551","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3551\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3551\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3551\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3551","id":1096561111,"node_id":"PR_kwDODunzps4wq_AO","number":3551,"title":"Add more compression types for `to_json`","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2022-01-07T18:25:02Z","updated_at":"2022-02-04T14:06:39Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3551","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3551","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3551.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3551.patch","merged_at":null},"body":"This PR adds `bz2`, `xz`, and `zip` (WIP) for `to_json`. 
I also plan to add `infer` like how `pandas` does it","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3551\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3551\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3550","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3550\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3550\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3550\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3550","id":1096522377,"node_id":"I_kwDODunzps5BW5qJ","number":3550,"title":"Bug in `openbookqa` dataset","user":{"login":"lucadiliello","id":23355969,"node_id":"MDQ6VXNlcjIzMzU1OTY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23355969?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucadiliello","html_url":"https:\/\/github.com\/lucadiliello","followers_url":"https:\/\/api.github.com\/users\/lucadiliello\/followers","following_url":"https:\/\/api.github.com\/users\/lucadiliello\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucadiliello\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucadiliello\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucadiliello\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucadiliello\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucadiliello\/repos","events_url":"https:\/\/api.github.com\/users\/lucadiliello\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucadiliello\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"open","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-07T17:32:57Z","updated_at":"2022-01-17T13:16:33Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nDataset entries contains a typo.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> obqa = load_dataset('openbookqa', 'main')\r\n>>> obqa['train'][0]\r\n```\r\n\r\n## Expected results\r\n```python\r\n{'id': '7-980', 'question_stem': 'The sun is responsible for', 'choices': {'text': ['puppies learning new tricks', 'children growing up and getting old', 'flowers wilting in a vase', 'plants sprouting, blooming and wilting'], 'label': ['A', 'B', 'C', 'D']}, 'answerKey': 'D'}\r\n```\r\n\r\n## Actual results\r\n```python\r\n{'id': '7-980', 'question_stem': 'The sun is responsible for', 'choices': {'text': ['puppies learning new tricks', 'children growing up and getting old', 'flowers wilting in a vase', 'plants sprouting, blooming and wilting'], 'label': ['puppies learning new tricks', 'children growing up and getting old', 'flowers wilting in a vase', 'plants sprouting, blooming and wilting']}, 'answerKey': 'D'}\r\n```\r\n\r\nThe bug is present in all configs and all splits.\r\n\r\n## Environment info\r\n- `datasets` version: 1.17.0\r\n- Platform: Linux-5.4.0-1057-aws-x86_64-with-glibc2.27\r\n- Python version: 3.9.7\r\n- PyArrow version: 
4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3550\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3550\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3549","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3549\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3549\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3549\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3549","id":1096426996,"node_id":"PR_kwDODunzps4wqkGt","number":3549,"title":"Fix sem_eval_2018_task_1 download location","user":{"login":"maxpel","id":31095360,"node_id":"MDQ6VXNlcjMxMDk1MzYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31095360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxpel","html_url":"https:\/\/github.com\/maxpel","followers_url":"https:\/\/api.github.com\/users\/maxpel\/followers","following_url":"https:\/\/api.github.com\/users\/maxpel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxpel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxpel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxpel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxpel\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxpel\/repos","events_url":"https:\/\/api.github.com\/users\/maxpel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxpel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-07T15:37:52Z","updated_at":"2022-01-27T15:52:03Z","closed_at":"2022-01-27T15:52:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3549","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3549","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3549.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3549.patch","merged_at":null},"body":"This changes the download location of sem_eval_2018_task_1 files to include the test set labels as discussed in https:\/\/github.com\/huggingface\/datasets\/issues\/2745#issuecomment-954588500_ with @lhoestq.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3549\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3549\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3548","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3548\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3548\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3548\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3548","id":1096409512,"node_id":"I_kwDODunzps5BWeGo","number":3548,"title":"Specify the feature types of a dataset on the Hub without needing a dataset script","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"abidlabs","id":1778297,"node_id":"MDQ6VXNlcjE3NzgyOTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1778297?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abidlabs","html_url":"https:\/\/github.com\/abidlabs","followers_url":"https:\/\/api.github.com\/users\/abidlabs\/followers","following_url":"https:\/\/api.github.com\/users\/abidlabs\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abidlabs\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abidlabs\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abidlabs\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abidlabs\/orgs","repos_url":"https:\/\/api.github.com\/users\/abidlabs\/repos","events_url":"https:\/\/api.github.com\/users\/abidlabs\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abidlabs\/received_events","type":"User","site_admin":false},"assignees":[{"login":"abidlabs","id":1778297,"node_id":"MDQ6VXNlcjE3NzgyOTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1778297?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abidlabs","html_url":"https:\/\/github.com\/abidlabs","followers_url":"https:\/\/api.github.com\/users\/abidlabs\/followers","following_url":"https:\/\/api.github.com\/users\/abidlabs\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abidlabs\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abidlabs\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abidlabs\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abidlabs\/orgs","repos_url":"https:\/\/api.github.com\/users\/abidlabs\/repos","events_url":"https:\/\/api.github.com\/users\/abidlabs\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abidlabs\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-07T15:17:06Z","updated_at":"2022-01-20T14:48:38Z","closed_at":"2022-01-20T14:48:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nCurrently if I upload a CSV with paths to audio files, the column type is string instead of Audio.\r\n\r\n**Describe the solution you'd like**\r\nI'd like to be able to specify the types of the column, so that when loading the dataset I directly get the features types I want.\r\n\r\nThe feature types could read from the `dataset_infos.json` for example.\r\n\r\n**Describe alternatives you've considered**\r\nCreate a dataset script to specify the features, but that seems complicated for a simple thing.\r\n\r\ncc @abidlabs ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3548\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3548\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3547","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3547\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3547\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3547\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3547","id":1096405515,"node_id":"I_kwDODunzps5BWdIL","number":3547,"title":"Datasets created with `push_to_hub` can't be accessed in offline mode","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-07T15:12:25Z","updated_at":"2022-01-10T10:44:44Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIn offline mode, one can still access previously-cached datasets. 
This fails with datasets created with `push_to_hub`.\r\n\r\n## Steps to reproduce the bug\r\nin Python:\r\n```\r\nimport datasets\r\nmpwiki = datasets.load_dataset(\"teven\/matched_passages_wikidata\")\r\n```\r\nin bash:\r\n```\r\nexport HF_DATASETS_OFFLINE=1\r\n```\r\nin Python:\r\n```\r\nimport datasets\r\nmpwiki = datasets.load_dataset(\"teven\/matched_passages_wikidata\")\r\n```\r\n\r\n## Expected results\r\n`datasets` should find the previously-cached dataset.\r\n\r\n## Actual results\r\nConnectionError: Couln't reach the Hugging Face Hub for dataset 'teven\/matched_passages_wikidata': Offline mode is enabled\r\n\r\n## Environment info\r\n- `datasets` version: 1.16.2.dev0\r\n- Platform: Linux-4.18.0-193.70.1.el8_2.x86_64-x86_64-with-glibc2.17\r\n- Python version: 3.8.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3547\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3547\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3546","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3546\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3546\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3546\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3546","id":1096367684,"node_id":"PR_kwDODunzps4wqYIV","number":3546,"title":"Remove print statements in datasets","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-07T14:30:24Z","updated_at":"2022-01-07T18:09:16Z","closed_at":"2022-01-07T18:09:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3546","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3546","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3546.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3546.patch","merged_at":"2022-01-07T18:09:15Z"},"body":"This is a second time I'm removing print statements in our datasets, so I've added a test to avoid these issues in the 
future.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3546\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3546\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3545","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3545\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3545\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3545\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3545","id":1096189889,"node_id":"PR_kwDODunzps4wpziv","number":3545,"title":"fix: \ud83d\udc1b pass token when retrieving the split names","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2022-01-07T10:29:22Z","updated_at":"2022-01-10T10:51:47Z","closed_at":"2022-01-10T10:51:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3545","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3545","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3545.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3545.patch","merged_at":"2022-01-10T10:51:46Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3545\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3545\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3544","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3544\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3544\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3544\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3544","id":1095784681,"node_id":"I_kwDODunzps5BUFjp","number":3544,"title":"Ability to split a dataset in multiple 
files.","user":{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-06T23:02:25Z","updated_at":"2022-01-06T23:02:25Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello,\r\n\r\n**Is your feature request related to a problem? Please describe.**\r\n\r\nMy use case is that I have one writer that adds columns and multiple workers reading the same `Dataset`. Each worker should have access to columns added by the writer when they reload the dataset.\r\n\r\nI understand that we shouldn't overwrite an arrow file as this could cause Segfault and so on. Before 1.16, I was able to overwrite the dataset and that would work most of the time with some retries.\r\n\r\n**Describe the solution you'd like**\r\n\r\nI was thinking that if we could append `Dataset._data_files`, when the workers reload the Dataset, they would get the new columns.\r\n\r\n\r\n**Describe alternatives you've considered**\r\nI currently need to\r\n1. Save multiple \"versions\" of the dataset and load the latest.\r\n2. 
Try working with cache files to get the latest columns.\r\n\r\n**Additional context**\r\n\r\nI think this would be a great addition to HFDataset as Parquet supports multi-files input out of the box!\r\n\r\nI can make a PR myself with some pointers as needed :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3544\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3544\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3543","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3543\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3543\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3543\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3543","id":1095226438,"node_id":"I_kwDODunzps5BR9RG","number":3543,"title":"Allow loading community metrics from the hub, just like datasets","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2022-01-06T11:26:26Z","updated_at":"2022-01-09T20:28:13Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nCurrently, I can load a metric implemented by me by providing the local path to the file in `load_metric`.\r\nHowever, there is no option to do it with the metric uploaded to the hub.\r\nThis means that if I want to allow other users to use it, they must download it first which makes the usage less smooth.\r\n\r\n**Describe the solution you'd like**\r\nLoad metrics from the hub just like datasets are loaded.\r\nIn order to not break stuff, the convention can be to put the metric file in a \"metrics\" folder in the hub.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3543\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3543\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3542","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3542\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3542\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3542\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3542","id":1095088485,"node_id":"PR_kwDODunzps4wmPIP","number":3542,"title":"Update the CC-100 dataset card","user":{"login":"aajanki","id":353043,"node_id":"MDQ6VXNlcjM1MzA0Mw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/353043?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aajanki","html_url":"https:\/\/github.com\/aajanki","followers_url":"https:\/\/api.github.com\/users\/aajanki\/followers","following_url":"https:\/\/api.github.com\/users\/aajanki\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aajanki\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aajanki\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aajanki\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aajanki\/orgs","repos_url":"https:\/\/api.github.com\/users\/aajanki\/repos","events_url":"https:\/\/api.github.com\/users\/aajanki\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aajanki\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-06T08:35:18Z","updated_at":"2022-01-06T18:37:44Z","closed_at":"2022-01-06T18:37:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3542","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3542","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3542.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3542.patch","merged_at":"2022-01-06T18:37:44Z"},"body":"* summary from the dataset homepage\r\n* more details about the data structure\r\n* this dataset does not contain annotations","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3542\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3542\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3541","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3541\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3541\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3541\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3541","id":1095033828,"node_id":"I_kwDODunzps5BROPk","number":3541,"title":"Support 7-zip compressed data files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-06T07:11:03Z","updated_at":"2022-01-19T14:01:18Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\nWe should support 7-zip compressed data files:\r\n- in `extract`\r\n- in `iter_archive`\r\n\r\nboth in streaming and non-streaming modes.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3541\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3541\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3540","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3540\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3540\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3540\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3540","id":1094900336,"node_id":"I_kwDODunzps5BQtpw","number":3540,"title":"How to convert torch.utils.data.Dataset to datasets.arrow_dataset.Dataset?","user":{"login":"CindyTing","id":35062414,"node_id":"MDQ6VXNlcjM1MDYyNDE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35062414?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/CindyTing","html_url":"https:\/\/github.com\/CindyTing","followers_url":"https:\/\/api.github.com\/users\/CindyTing\/followers","following_url":"https:\/\/api.github.com\/users\/CindyTing\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/CindyTing\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/CindyTing\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/CindyTing\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/CindyTing\/orgs","repos_url":"https:\/\/api.github.com\/users\/CindyTing\/repos","events_url":"https:\/\/api.github.com\/users\/CindyTing\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/CindyTing\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-06T02:13:42Z","updated_at":"2022-01-06T02:17:39Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI use torch.utils.data.Dataset to define my own data, but I need to use the 'map' function of datasets.arrow_dataset.Dataset later, so I hope to convert torch.utils.data.Dataset to datasets.arrow_dataset.Dataset.\r\n\r\nHere is an example.\r\n```\r\nfrom torch.utils.data import Dataset\r\nfrom datasets.arrow_dataset import Dataset as HFDataset\r\n\r\nclass ADataset(Dataset):\r\n def __init__(self, data): \r\n super().__init__()\r\n self.data = data\r\n def __getitem__(self, index):\r\n return self.data[index]\r\n def __len__(self):\r\n return self.len\r\nclass MDataset():\r\n def __init__(self, tokenizer: AutoTokenizer, data_args, training_args):\r\n self.train_dataset = ADataset(data_args)\r\n self.tokenizer = tokenizer\r\n self.data_args = data_args\r\n\r\n self.train_dataset = self.train_dataset.map(\r\n self.process_function,\r\n batched=True,\r\n 
remove_columns=column_names,\r\n load_from_cache_file=True,\r\n desc=\"Running tokenizer on train dataset\",\r\n )\r\n def process_function(self, examples):\r\n sentences = [\" \".join(sample[0][3]) for sample in examples]\r\n\r\n tokenized = self.tokenizer(\r\n sentences,\r\n max_length=self.max_seq_len,\r\n padding=self.padding, \r\n truncation=True)\r\n```\r\nBut it would raise an ERROR, AttributeError: 'ADataset' object has no attribute 'map'.\r\n\r\nso how to convert torch.utils.data.Dataset to datasets.arrow_dataset.Dataset?\r\n\r\nThanks in advance!\r\n\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3540\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3540\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3539","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3539\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3539\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3539\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3539","id":1094813242,"node_id":"PR_kwDODunzps4wlXU4","number":3539,"title":"Research wording for nc licenses","user":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{
\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"assignees":[{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-05T23:01:38Z","updated_at":"2022-01-06T18:58:20Z","closed_at":"2022-01-06T18:58:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3539","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3539","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3539.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3539.patch","merged_at":"2022-01-06T18:58:19Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3539\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3539\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3538","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3538\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3538\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3538\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3538","id":1094756755,"node_id":"PR_kwDODunzps4wlLmD","number":3538,"title":"Readme usage 
update","user":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"assignees":[{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-05T21:26:28Z","updated_at":"2022-01-05T23:34:25Z","closed_at":"2022-01-05T23:24:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3538","html_ur
l":"https:\/\/github.com\/huggingface\/datasets\/pull\/3538","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3538.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3538.patch","merged_at":"2022-01-05T23:24:15Z"},"body":"Noticing that the recent commit throws a lot of errors in the automatic checks. It looks to me that those errors are simply errors that were already there (metadata issues), unrelated to what I've just changed, but worth another look to make sure.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3538\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3538\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3537","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3537\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3537\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3537\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3537","id":1094738734,"node_id":"PR_kwDODunzps4wlH1d","number":3537,"title":"added PII statements and license links to data cards","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-05T20:59:21Z","updated_at":"2022-01-05T22:02:37Z","closed_at":"2022-01-05T22:02:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3537","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3537","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3537.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3537.patch","merged_at":"2022-01-05T22:02:37Z"},"body":"Updates for the following datacards:\r\nmultilingual_librispeech\r\nopenslr\r\nspeech 
commands\r\nsuperb\r\ntimit_asr\r\nvctk","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3537\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3537\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3536","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3536\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3536\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3536\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3536","id":1094645771,"node_id":"PR_kwDODunzps4wk0Yb","number":3536,"title":"update `pretty_name` for all datasets","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-05T18:45:05Z","updated_at":"2022-01-12T22:59:46Z","closed_at":"2022-01-12T22:59:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3536","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3536","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3536.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3536.patch","merged_at":"2022-01-12T22:59:45Z"},"body":"This PR updates `pretty_name` for all datasets. 
Previous PR #3498 had done this for only first 200 datasets","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3536\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3536\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3535","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3535\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3535\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3535\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3535","id":1094633214,"node_id":"PR_kwDODunzps4wkxv0","number":3535,"title":"Add SVHN dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-05T18:29:09Z","updated_at":"2022-01-12T14:14:35Z","closed_at":"2022-01-12T14:14:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3535","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3535","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3535.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3535.patch","merged_at":"2022-01-12T14:14:35Z"},"body":"Add the SVHN dataset. 
\r\n\r\nAdditional notes:\r\n* compared to the TFDS implementation, exposes additional the \"full numbers\" config\r\n* adds the streaming support for `os.path.splitext` and `scipy.io.loadmat` \r\n* adds `h5py` to the requirements list for the dummy data test","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3535\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3535\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3534","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3534\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3534\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3534\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3534","id":1094352449,"node_id":"PR_kwDODunzps4wj3LE","number":3534,"title":"Update wiki_dpr README.md","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-05T13:29:44Z","updated_at":"2022-01-05T14:16:52Z","closed_at":"2022-01-05T14:16:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3534","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3534","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3534.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3534.patch","merged_at":"2022-01-05T14:16:51Z"},"body":"Some infos of wiki_dpr were missing as noted in https:\/\/github.com\/huggingface\/datasets\/issues\/3510, I added them and updated the tags and the examples","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3534\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3534\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3533","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3533\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3533\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3533\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3533","id":1094156147,"node_id":"I_kwDODunzps5BN39z","number":3533,"title":"Task search function on hub not working correctly","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"ht
tps:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2022-01-05T09:36:30Z","updated_at":"2022-01-05T10:03:08Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I want to look at all datasets of the category: `speech-processing` *i.e.* https:\/\/huggingface.co\/datasets?task_categories=task_categories:speech-processing&sort=downloads , then the following dataset doesn't show up for some reason:\r\n\r\n- https:\/\/huggingface.co\/datasets\/speech_commands\r\n\r\neven thought it's task tags seem correct: \r\nhttps:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/speech_commands\/README.md","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3533\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3533\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3532","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3532\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3532\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3532\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3532","id":1094035066,"node_id":"PR_kwDODunzps4wi1ft","number":3532,"title":"Give clearer instructions to add the YAML 
tags","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-05T06:47:52Z","updated_at":"2022-01-17T15:54:37Z","closed_at":"2022-01-17T15:54:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3532","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3532","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3532.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3532.patch","merged_at":"2022-01-17T15:54:36Z"},"body":"Fix #3531.\r\n\r\nCC: @julien-c @VictorSanh ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3532\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3532\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3531","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3531\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3531\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3531\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3531","id":1094033280,"node_id":"I_kwDODunzps5BNZ-A","number":3531,"title":"Give clearer instructions to add the YAML 
tags","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-05T06:44:20Z","updated_at":"2022-01-17T15:54:36Z","cl
osed_at":"2022-01-17T15:54:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAs reported by @julien-c, many community datasets contain the line `YAML tags:` at the top of the YAML section in the header of the README file. See e.g.: https:\/\/huggingface.co\/datasets\/bigscience\/P3\/commit\/a03bea08cf4d58f268b469593069af6aeb15de32\r\n\r\nMaybe we should give clearer instruction\/hints in the README template.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3531\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3531\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3530","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3530\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3530\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3530\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3530","id":1093894732,"node_id":"PR_kwDODunzps4wiZCw","number":3530,"title":"Update README.md","user":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type"
:"User","site_admin":false},"assignees":[{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-05T01:32:07Z","updated_at":"2022-01-05T12:50:51Z","closed_at":"2022-01-05T12:50:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3530","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3530","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3530.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3530.patch","merged_at":"2022-01-05T12:50:50Z"},"body":"Removing reference to \"Common Voice\" in Personal and Sensitive Information section.\r\nAdding link to license.\r\nCorrect license type in metadata.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3530\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3530\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3529","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3529\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3529\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3529\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3529","id":1093846356,"node_id":"PR_kwDODunzps4wiPA9","number":3529,"title":"Update 
README.md","user":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"assignees":[{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-04T23:52:47Z","updated_at":"2022-01-05T12:50:15Z","closed_at":"2022-01-05T12:50:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3529","html
_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3529","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3529.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3529.patch","merged_at":"2022-01-05T12:50:14Z"},"body":"Updating licensing information & personal and sensitive information.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3529\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3529\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3528","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3528\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3528\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3528\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3528","id":1093844616,"node_id":"PR_kwDODunzps4wiOqH","number":3528,"title":"Update README.md","user":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"assignees":[{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent
.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-04T23:48:11Z","updated_at":"2022-01-05T12:49:41Z","closed_at":"2022-01-05T12:49:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3528","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3528","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3528.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3528.patch","merged_at":"2022-01-05T12:49:40Z"},"body":"Updating license with appropriate capitalization & a link.\r\nUpdating Personal and Sensitive Information to address PII concern.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3528\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3528\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3527","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3527\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3527\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3527\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3527","id":1093840707,"node_id":"PR_kwDODunzps4wiN1w","number":3527,"title":"Update 
README.md","user":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"assignees":[{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-04T23:39:41Z","updated_at":"2022-01-05T00:23:50Z","closed_at":"2022-01-05T00:23:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3527","html
_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3527","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3527.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3527.patch","merged_at":"2022-01-05T00:23:50Z"},"body":"Adding licensing information.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3527\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3527\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3526","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3526\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3526\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3526\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3526","id":1093833446,"node_id":"PR_kwDODunzps4wiMaQ","number":3526,"title":"Update README.md","user":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"assignees":[{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","
url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-04T23:25:23Z","updated_at":"2022-01-04T23:30:08Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3526","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3526","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3526.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3526.patch","merged_at":null},"body":"Not entirely sure, following the links here, but it seems the relevant license is at https:\/\/github.com\/soskek\/bookcorpus\/blob\/master\/LICENSE","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3526\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3526\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3525","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3525\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3525\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3525\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3525","id":1093831268,"node_id":"PR_kwDODunzps4wiL8p","number":3525,"title":"Adding license information for 
Openbookcorpus","user":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"assignees":[{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-01-04T23:20:36Z","updated_at":"2022-01-05T12:48:38Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3525","html_url":"https:\/
\/github.com\/huggingface\/datasets\/pull\/3525","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3525.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3525.patch","merged_at":null},"body":"Not entirely sure, following the links here, but it seems the relevant license is at https:\/\/github.com\/soskek\/bookcorpus\/blob\/master\/LICENSE","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3525\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3525\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3524","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3524\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3524\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3524\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3524","id":1093826723,"node_id":"PR_kwDODunzps4wiK_v","number":3524,"title":"Adding link to license.","user":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"assignees":[{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczN
zIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2022-01-04T23:11:48Z","updated_at":"2022-01-05T12:31:38Z","closed_at":"2022-01-05T12:31:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3524","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3524","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3524.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3524.patch","merged_at":"2022-01-05T12:31:37Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3524\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3524\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3523","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3523\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3523\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3523\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3523","id":1093819227,"node_id":"PR_kwDODunzps4wiJc2","number":3523,"title":"Added links to licensing and PII message in vctk 
dataset","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-04T22:56:58Z","updated_at":"2022-01-06T19:33:50Z","closed_at":"2022-01-06T19:33:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3523","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3523","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3523.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3523.patch","merged_at":"2022-01-06T19:33:50Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3523\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3523\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3522","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3522\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3522\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3522\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3522","id":1093807586,"node_id":"I_kwDODunzps5BMi3i","number":3522,"title":"wmt19 is broken 
(zh-en)","user":{"login":"AjayP13","id":5404177,"node_id":"MDQ6VXNlcjU0MDQxNzc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5404177?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AjayP13","html_url":"https:\/\/github.com\/AjayP13","followers_url":"https:\/\/api.github.com\/users\/AjayP13\/followers","following_url":"https:\/\/api.github.com\/users\/AjayP13\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AjayP13\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AjayP13\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AjayP13\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AjayP13\/orgs","repos_url":"https:\/\/api.github.com\/users\/AjayP13\/repos","events_url":"https:\/\/api.github.com\/users\/AjayP13\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AjayP13\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-04T22:33:45Z","updated_at":"2022-01-17T13:16:55Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"wmt19\", 'zh-en')\r\n```\r\n\r\n## Expected results\r\nThe dataset should download.\r\n\r\n## Actual results\r\n`ConnectionError: Couldn't reach ftp:\/\/cwmt-wmt:cwmt-wmt@datasets.nju.edu.cn\/parallel\/casia2015.zip`\r\n\r\n## Environment info\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux\r\n- Python version: 3.8 \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3522\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3522\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3521","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3521\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3521\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3521\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3521","id":1093797947,"node_id":"PR_kwDODunzps4wiFCs","number":3521,"title":"Vivos license 
update","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-04T22:17:47Z","updated_at":"2022-01-04T22:18:16Z","closed_at":"2022-01-04T22:18:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3521","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3521","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3521.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3521.patch","merged_at":null},"body":"Updated the license information with the link to the license text ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3521\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3521\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3520","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3520\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3520\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3520\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3520","id":1093747753,"node_id":"PR_kwDODunzps4wh6oD","number":3520,"title":"Audio datacard update - first 
pass","user":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false},"assignees":[{"login":"meg-huggingface","id":90473723,"node_id":"MDQ6VXNlcjkwNDczNzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90473723?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/meg-huggingface","html_url":"https:\/\/github.com\/meg-huggingface","followers_url":"https:\/\/api.github.com\/users\/meg-huggingface\/followers","following_url":"https:\/\/api.github.com\/users\/meg-huggingface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/meg-huggingface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/meg-huggingface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/meg-huggingface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/meg-huggingface\/orgs","repos_url":"https:\/\/api.github.com\/users\/meg-huggingface\/repos","events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/meg-huggingface\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2022-01-04T20:58:25Z","updated_at":"2022-01-05T12:30:21Z","closed_at":"2022-01-05T12:30:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3520","html_url"
:"https:\/\/github.com\/huggingface\/datasets\/pull\/3520","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3520.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3520.patch","merged_at":"2022-01-05T12:30:20Z"},"body":"Filling out data card \"Personal and Sensitive Information\" for speech datasets to note PII concerns","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3520\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3520\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3519","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3519\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3519\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3519\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3519","id":1093655205,"node_id":"PR_kwDODunzps4whnXH","number":3519,"title":"CC100: Using HTTPS for the data source URL fixes load_dataset()","user":{"login":"aajanki","id":353043,"node_id":"MDQ6VXNlcjM1MzA0Mw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/353043?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aajanki","html_url":"https:\/\/github.com\/aajanki","followers_url":"https:\/\/api.github.com\/users\/aajanki\/followers","following_url":"https:\/\/api.github.com\/users\/aajanki\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aajanki\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aajanki\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aajanki\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aajanki\/orgs","repos_url":"https:\/\/api.github.com\/users\/aajanki\/repos","events_url":"https:\/\/api.github.com\/users\/aajanki\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aajanki\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-04T18:45:54Z","updated_at":"2022-01-05T17:28:34Z","closed_at":"2022-01-05T17:28:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3519","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3519","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3519.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3519.patch","merged_at":"2022-01-05T17:28:34Z"},"body":"Without this change the following script (with any lang parameter) consistently fails. 
After changing to the HTTPS URL, the script works as expected.\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"cc100\", lang=\"en\")\r\n```\r\n\r\nThis is the error produced by the previous script:\r\n```sh\r\nUsing custom data configuration en-lang=en\r\nDownloading and preparing dataset cc100\/en to \/home\/antti\/.cache\/huggingface\/datasets\/cc100\/en-lang=en\/0.0.0\/526ac20780de5e074cf73a7466e868cb67f960b48f6de42ff6a6c4e71910d71b...\r\nTraceback (most recent call last):\r\n File \"\/home\/antti\/tmp\/cc100\/cc100.py\", line 3, in \r\n dataset = load_dataset(\"cc100\", lang=\"en\")\r\n File \"\/home\/antti\/tmp\/cc100\/venv\/lib\/python3.9\/site-packages\/datasets\/load.py\", line 1694, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/antti\/tmp\/cc100\/venv\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 595, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/antti\/tmp\/cc100\/venv\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 661, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/antti\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/cc100\/526ac20780de5e074cf73a7466e868cb67f960b48f6de42ff6a6c4e71910d71b\/cc100.py\", line 117, in _split_generators\r\n path = dl_manager.download_and_extract(download_url)\r\n File \"\/home\/antti\/tmp\/cc100\/venv\/lib\/python3.9\/site-packages\/datasets\/utils\/download_manager.py\", line 308, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/home\/antti\/tmp\/cc100\/venv\/lib\/python3.9\/site-packages\/datasets\/utils\/download_manager.py\", line 196, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/home\/antti\/tmp\/cc100\/venv\/lib\/python3.9\/site-packages\/datasets\/utils\/py_utils.py\", line 251, in map_nested\r\n return function(data_struct)\r\n File \"\/home\/antti\/tmp\/cc100\/venv\/lib\/python3.9\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/home\/antti\/tmp\/cc100\/venv\/lib\/python3.9\/site-packages\/datasets\/utils\/file_utils.py\", line 298, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/home\/antti\/tmp\/cc100\/venv\/lib\/python3.9\/site-packages\/datasets\/utils\/file_utils.py\", line 617, in get_from_cache\r\n raise ConnectionError(f\"Couldn't reach {url} (error {response.status_code})\")\r\nConnectionError: Couldn't reach http:\/\/data.statmt.org\/cc-100\/en.txt.xz (error 503)\r\n```\r\n\r\nNote that I get the same behavior also using curl on the command line. The plain HTTP \"curl -L http:\/\/data.statmt.org\/cc-100\/en.txt.xz\" fails with \"503 Service unavailable\", but with the HTTPS version of the URL curl starts downloading the file.\r\n\r\nMy guess is that the server does overly aggressive rate-limiting. When a client requests an HTTP URL, it (sensibly) gets redirected to the HTTPS equivalent, but now the server notices two requests coming from the same client (the original HTTP and the redirected HTTPS) during a brief time window and the rate-limiter kicks in and blocks the second request! 
If the client initially uses the HTTPS URL there's only one incoming request, which the rate-limiter allows.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3519\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3519\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3518","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3518\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3518\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3518\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3518","id":1093063455,"node_id":"I_kwDODunzps5BJtMf","number":3518,"title":"Add PubMed Central Open Access dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2022-01-04T06:54:35Z","updated_at":"2022-01-17T15:25:57Z","closed_at":"2022-01-17T15:25:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** PubMed Central Open Access\r\n- **Description:** The PMC Open Access Subset includes more than 3.4 million journal articles and preprints that are made available under license terms that allow reuse.\r\n- **Paper:** *link to the dataset paper if available*\r\n- **Data:** https:\/\/www.ncbi.nlm.nih.gov\/pmc\/tools\/openftlist\/\r\n- **Motivation:** *what are some good reasons to have this dataset*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3518\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3518\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3517","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3517\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3517\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3517\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3517","id":1092726651,"node_id":"PR_kwDODunzps4wemwU","number":3517,"title":"Add CPPE-5 dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-03T18:31:20Z","updated_at":"2022-01-19T02:23:37Z","closed_at":"2022-01-05T18:53:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3517","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3517","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3517.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3517.patch","merged_at":"2022-01-05T18:53:02Z"},"body":"Adds the recently released CPPE-5 dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3517\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3517\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3516","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3516\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3516\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3516\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3516","id":1092657738,"node_id":"PR_kwDODunzps4weYhE","number":3516,"title":"dataset `asset` - change to raw.githubusercontent.com 
URLs","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-03T16:43:57Z","updated_at":"2022-01-03T17:39:02Z","closed_at":"2022-01-03T17:39:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3516","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3516","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3516.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3516.patch","merged_at":"2022-01-03T17:39:01Z"},"body":"Changed the URLs to the ones it was automatically re-directing.\r\nBefore, the download was failing","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3516\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3516\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3515","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3515\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3515\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3515\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3515","id":1092624695,"node_id":"I_kwDODunzps5BICE3","number":3515,"title":"`ExpectedMoreDownloadedFiles` for 
`evidence_infer_treatment`","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-03T15:58:38Z","updated_at":"2022-01-17T13:25:37Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to load a dataset called `evidence_infer_treatment`. The first subset (`1.1`) works fine but the second returns an error (`2.0`). 
It downloads a file but crashes during the checksums.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> load_dataset(\"evidence_infer_treatment\", \"2.0\")\r\nDownloading and preparing dataset evidence_infer_treatment\/2.0 (download: 34.84 MiB, generated: 91.46 MiB, post-processed: Unknown size, total: 126.30 MiB) to \/home\/victor_huggingface_co\/.cache\/huggingface\/datasets\/evidence_infer_treatment\/2.0\/2.0.0\/6812655bfd26cbaa58c84eab098bf6403694b06c6ae2ded603c55681868a1e24...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 1669, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 594, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 664, in _download_and_prepare\r\n self.info.download_checksums, dl_manager.get_recorded_sizes_checksums(), \"dataset source files\"\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/utils\/info_utils.py\", line 33, in verify_checksums\r\n raise ExpectedMoreDownloadedFiles(str(set(expected_checksums) - set(recorded_checksums)))\r\ndatasets.utils.info_utils.ExpectedMoreDownloadedFiles: {'http:\/\/evidence-inference.ebm-nlp.com\/v2.0.tar.gz'}\r\n```\r\n\r\nI did try to pass the argument `ignore_verifications=True` but run into an error when trying to build the dataset:\r\n```python\r\n>>> load_dataset(\"evidence_infer_treatment\", \"2.0\", ignore_verifications=True, download_mode=\"force_redownload\")\r\nDownloading and preparing dataset evidence_infer_treatment\/2.0 (download: 34.84 MiB, generated: 91.46 MiB, post-processed: Unknown size, total: 126.30 MiB) to \/home\/victor_huggingface_co\/.cache\/huggingface\/datasets\/evidence_infer_treatment\/2.0\/2.0.0\/6812655bfd26cbaa58c84eab098bf6403694b06c6ae2ded603c55681868a1e24...\r\nDownloading: 164MB [00:23, 6.98MB\/s] \r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 1669, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 594, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 681, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 1080, in _prepare_split\r\n example = self.info.features.encode_example(record)\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/features\/features.py\", line 1032, in encode_example\r\n return encode_nested_example(self, example)\r\n File 
\"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/features\/features.py\", line 807, in encode_nested_example\r\n k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/features\/features.py\", line 807, in \r\n k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/features\/features.py\", line 829, in encode_nested_example\r\n list_dict[k] = [encode_nested_example(dict_tuples[0], o) for o in dict_tuples[1:]]\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/features\/features.py\", line 829, in \r\n list_dict[k] = [encode_nested_example(dict_tuples[0], o) for o in dict_tuples[1:]]\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/features\/features.py\", line 828, in encode_nested_example\r\n for k, dict_tuples in utils.zip_dict(schema.feature, *obj):\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 136, in zip_dict\r\n yield key, tuple(d[key] for d in dicts)\r\n File \"\/home\/victor_huggingface_co\/miniconda3\/envs\/promptsource\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 136, in \r\n yield key, tuple(d[key] for d in dicts)\r\nKeyError: ''\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.16.1\r\n- Platform: Linux-5.0.0-1020-gcp-x86_64-with-debian-buster-sid\r\n- Python version: 3.7.11\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3515\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3515\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3514","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3514\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3514\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3514\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3514","id":1092606383,"node_id":"PR_kwDODunzps4weN9W","number":3514,"title":"Fix to_tf_dataset references in 
docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-03T15:31:39Z","updated_at":"2022-01-05T18:52:48Z","closed_at":"2022-01-05T18:52:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3514","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3514","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3514.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3514.patch","merged_at":"2022-01-05T18:52:47Z"},"body":"Fix the `to_tf_dataset` references in the docs. The currently failing example of usage will be fixed by #3338. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3514\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3514\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3513","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3513\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3513\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3513\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3513","id":1092569802,"node_id":"PR_kwDODunzps4weGWl","number":3513,"title":"Add desc parameter to 
filter","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2022-01-03T14:44:18Z","updated_at":"2022-01-05T18:31:25Z","closed_at":"2022-01-05T18:31:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3513","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3513","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3513.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3513.patch","merged_at":"2022-01-05T18:31:24Z"},"body":"Fix #3317 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3513\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3513\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3512","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3512\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3512\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3512\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3512","id":1092359973,"node_id":"I_kwDODunzps5BHBcl","number":3512,"title":"No Data format 
found","user":{"login":"shazzad47","id":57741378,"node_id":"MDQ6VXNlcjU3NzQxMzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57741378?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shazzad47","html_url":"https:\/\/github.com\/shazzad47","followers_url":"https:\/\/api.github.com\/users\/shazzad47\/followers","following_url":"https:\/\/api.github.com\/users\/shazzad47\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shazzad47\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shazzad47\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shazzad47\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shazzad47\/orgs","repos_url":"https:\/\/api.github.com\/users\/shazzad47\/repos","events_url":"https:\/\/api.github.com\/users\/shazzad47\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shazzad47\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-03T09:41:11Z","updated_at":"2022-01-17T13:26:05Z","closed_at":"2022-01-17T13:26:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*name of the dataset*'\r\n\r\n**Link:** *link to the dataset viewer page*\r\n\r\n*short description of the issue*\r\n\r\nAm I the one who added this dataset ? Yes-No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3512\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3512\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3511","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3511\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3511\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3511\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3511","id":1092170411,"node_id":"I_kwDODunzps5BGTKr","number":3511,"title":"Dataset","user":{"login":"MIKURI0114","id":92849978,"node_id":"U_kgDOBYjHOg","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/92849978?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MIKURI0114","html_url":"https:\/\/github.com\/MIKURI0114","followers_url":"https:\/\/api.github.com\/users\/MIKURI0114\/followers","following_url":"https:\/\/api.github.com\/users\/MIKURI0114\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MIKURI0114\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MIKURI0114\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MIKURI0114\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MIKURI0114\/orgs","repos_url":"https:\/\/api.github.com\/users\/MIKURI0114\/repos","events_url":"https:\/\/api.github.com\/users\/MIKURI0114\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MIKURI0114\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2022-01-03T02:03:23Z","updated_at":"2022-01-03T08:41:26Z","closed_at":"2022-01-03T08:23:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*name of the dataset*'\r\n\r\n**Link:** *link to the dataset viewer page*\r\n\r\n*short description of the issue*\r\n\r\nAm I the one who added this dataset ? 
Yes-No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3511\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3511\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3510","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3510\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3510\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3510\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3510","id":1091997004,"node_id":"I_kwDODunzps5BFo1M","number":3510,"title":"`wiki_dpr` details for Open Domain Question Answering tasks","user":{"login":"pk1130","id":40918514,"node_id":"MDQ6VXNlcjQwOTE4NTE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40918514?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pk1130","html_url":"https:\/\/github.com\/pk1130","followers_url":"https:\/\/api.github.com\/users\/pk1130\/followers","following_url":"https:\/\/api.github.com\/users\/pk1130\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pk1130\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pk1130\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pk1130\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pk1130\/orgs","repos_url":"https:\/\/api.github.com\/users\/pk1130\/repos","events_url":"https:\/\/api.github.com\/users\/pk1130\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pk1130\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2022-01-02T11:04:01Z","updated_at":"2022-01-05T13:16:05Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hey guys!\r\n\r\nThanks for creating the `wiki_dpr` dataset!\r\n\r\nI am currently trying to use the dataset for context retrieval using DPR on NQ questions and need details about what each of the files and data instances mean, which version of the Wikipedia dump it uses, etc. Please respond at your earliest convenience regarding the same! Thanks a ton!\r\n\r\nP.S.: (If one of @thomwolf @lewtun @lhoestq could respond, that would be even better since they have the first-hand details of the dataset. If anyone else has those, please reach out! 
Thanks!)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3510\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3510\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3507","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3507\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3507\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3507\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3507","id":1091214808,"node_id":"I_kwDODunzps5BCp3Y","number":3507,"title":"Discuss whether support canonical datasets w\/o dataset_infos.json and\/or dummy data","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2021-12-30T17:04:25Z","updated_at":"2022-02-10T17:18:10Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I open this PR to have a public discussion about this topic and make a decision.\r\n\r\nAs previously discussed, once we have the metadata in the dataset card (README file, containing both Markdown info and YAML tags), what is the point of having also the JSON metadata (dataset_infos.json file)?\r\n\r\nOn the other hand, the dummy data is necessary for testing (in our CI suite) that the canonical dataset loads correctly. 
However:\r\n- the dataset preview feature is already an indirect test that the dataset loads correctly (it also tests it is streamable though)\r\n- we are migrating canonical datasets to the Hub\r\nDo we really need to continue testing them in out CI?\r\n\r\nAlso note that for generating both (dataset_infos.json file and dummy data), the entire dataset needs being downloaded. This can be an issue for huge datasets (like WIT, with 400 GB of data).\r\n\r\nFeel free to ping other people for the discussion.\r\n\r\nCC: @lhoestq @mariosasko @thomwolf @julien-c @patrickvonplaten @anton-l @LysandreJik @yjernite @nateraw ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3507\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3507\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3506","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3506\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3506\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3506\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3506","id":1091166595,"node_id":"PR_kwDODunzps4wZpot","number":3506,"title":"Allows DatasetDict.filter to have batching option","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-30T15:22:22Z","updated_at":"2022-01-04T10:24:28Z","closed_at":"2022-01-04T10:24:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3506","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3506","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3506.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3506.patch","merged_at":"2022-01-04T10:24:27Z"},"body":"- Related to: #3244\r\n- Fixes: #3503\r\n\r\nWe extends `.filter( ... 
batched: bool)` support to DatasetDict.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3506\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3506\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3505","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3505\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3505\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3505\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3505","id":1091150820,"node_id":"I_kwDODunzps5BCaPk","number":3505,"title":"cast_column function not working with map function in streaming mode for Audio features","user":{"login":"ashu5644","id":8268102,"node_id":"MDQ6VXNlcjgyNjgxMDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8268102?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ashu5644","html_url":"https:\/\/github.com\/ashu5644","followers_url":"https:\/\/api.github.com\/users\/ashu5644\/followers","following_url":"https:\/\/api.github.com\/users\/ashu5644\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ashu5644\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ashu5644\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ashu5644\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ashu5644\/orgs","repos_url":"https:\/\/api.github.com\/users\/ashu5644\/repos","events_url":"https:\/\/api.github.com\/users\/ashu5644\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ashu5644\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-12-30T14:52:01Z","updated_at":"2022-01-18T19:54:07Z","closed_at":"2022-01-18T19:54:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to use Audio class for loading audio features using custom dataset. I am able to cast 'audio' feature into 'Audio' format with cast_column function. On using map function, I am not getting 'Audio' casted feature but getting path of audio file only.\r\nI am getting features of 'audio' of string type with load_dataset call. After using cast_column 'audio' feature is converted into 'Audio' type. But in map function I am not able to get Audio type for audio feature & getting string type data containing path of file only. 
So I am not able to use processor in encode function.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nfrom datasets import load_dataset, Audio\r\nfrom transformers import Wav2Vec2Processor\r\n\r\ndef encode(batch, processor):\r\n print(\"Audio: \",batch['audio'])\r\n batch[\"input_values\"] = processor(batch[\"audio\"]['array'], sampling_rate=16000).input_values\r\n return batch\r\n\r\ndef print_ds(ds):\r\n iterator = iter(ds)\r\n for d in iterator:\r\n print(\"Data: \",d)\r\n break\r\n\r\nprocessor = Wav2Vec2Processor.from_pretrained(pretrained_model_path)\r\n\r\ndataset = load_dataset(\"custom_dataset.py\",\"train\",data_files={'train':'train_path.txt'},\r\n data_dir=\"data\", streaming=True, split=\"train\")\r\n\r\nprint(\"Features: \",dataset.features)\r\n\r\nprint_ds(dataset)\r\n\r\ndataset = dataset.cast_column(\"audio\", Audio(sampling_rate=16_000))\r\n\r\nprint(\"Features: \",dataset.features)\r\n\r\nprint_ds(dataset)\r\n\r\ndataset = dataset.map(lambda x: encode(x,processor))\r\n\r\nprint(\"Features: \",dataset.features)\r\n\r\nprint_ds(dataset)\r\n\r\n\r\n\r\n```\r\n\r\n## Expected results\r\n\r\nmap function not printing Audio type features be used with processor function and getting error in processor call due to this.\r\n\r\n## Actual results\r\n\r\n# after load_dataset call\r\nFeatures: {'sentence': Value(dtype='string', id=None), 'audio': Value(dtype='string', id=None)}\r\nData: {'sentence': '\u0914\u0930 \u0905\u092a\u0928\u0947 \u092a\u0947\u091f \u0915\u094b \u092e\u093e\u0901 \u0915\u0940 \u0938\u094d\u0935\u093e\u0926\u093f\u0937\u094d\u091f \u0917\u0930\u092e\u0917\u0930\u092e \u091c\u0932\u0947\u092c\u093f\u092f\u093e\u0901 \u0939\u095c\u092a\u0924\u0947\\n', 'audio': 'data\/0116_003.wav'}\r\n\r\n# after cast_column call\r\nFeatures: {'sentence': Value(dtype='string', id=None), 'audio': Audio(sampling_rate=16000, mono=True, _storage_dtype='string', id=None)}\r\nData: {'sentence': '\u0914\u0930 \u0905\u092a\u0928\u0947 \u092a\u0947\u091f \u0915\u094b \u092e\u093e\u0901 \u0915\u0940 \u0938\u094d\u0935\u093e\u0926\u093f\u0937\u094d\u091f \u0917\u0930\u092e\u0917\u0930\u092e \u091c\u0932\u0947\u092c\u093f\u092f\u093e\u0901 \u0939\u095c\u092a\u0924\u0947\\n', 'audio': {'path': 'data\/0116_003.wav', 'array': array([ 1.2662281e-06, 1.0264218e-06, -1.3615092e-06, ...,\r\n 1.3017889e-02, 1.0085563e-02, 4.8155054e-03], dtype=float32), 'sampling_rate': 16000}}\r\n\r\n# after map call\r\nFeatures: None\r\nAudio: data\/0116_003.wav\r\n\r\nTraceback (most recent call last):\r\n File \"demo2.py\", line 36, in \r\n print_ds(dataset)\r\n File \"demo2.py\", line 11, in print_ds\r\n for d in iterator:\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/iterable_dataset.py\", line 341, in __iter__\r\n for key, example in self._iter():\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/iterable_dataset.py\", line 338, in _iter\r\n yield from ex_iterable\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/iterable_dataset.py\", line 192, in __iter__\r\n yield key, self.function(example)\r\n File \"demo2.py\", line 32, in \r\n dataset = dataset.map(lambda x: batch_encode(x,processor))\r\n File \"demo2.py\", line 6, in batch_encode\r\n batch[\"input_values\"] = processor(batch[\"audio\"]['array'], sampling_rate=16000).input_values\r\nTypeError: string indices must be integers\r\n## Environment info\r\n\r\n- `datasets` version: 1.17.0\r\n- Platform: Linux-4.14.243 with-debian-bullseye-sid\r\n- Python 
version: 3.7.9\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3505\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3505\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3504","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3504\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3504\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3504\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3504","id":1090682230,"node_id":"I_kwDODunzps5BAn12","number":3504,"title":"Unable to download PUBMED_title_abstracts_2019_baseline.jsonl.zst","user":{"login":"ToddMorrill","id":12600692,"node_id":"MDQ6VXNlcjEyNjAwNjky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12600692?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ToddMorrill","html_url":"https:\/\/github.com\/ToddMorrill","followers_url":"https:\/\/api.github.com\/users\/ToddMorrill\/followers","following_url":"https:\/\/api.github.com\/users\/ToddMorrill\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ToddMorrill\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ToddMorrill\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ToddMorrill\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ToddMorrill\/orgs","repos_url":"https:\/\/api.github.com\/users\/ToddMorrill\/repos","events_url":"https:\/\/api.github.com\/users\/ToddMorrill\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ToddMorrill\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-12-29T18:23:20Z","updated_at":"2022-01-17T13:28:53Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am unable to download the PubMed dataset from the link provided in the [Hugging Face Course (Chapter 5 Section 4)](https:\/\/huggingface.co\/course\/chapter5\/4?fw=pt).\r\n\r\nhttps:\/\/the-eye.eu\/public\/AI\/pile_preliminary_components\/PUBMED_title_abstracts_2019_baseline.jsonl.zst \r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nfrom datasets import load_dataset\r\n\r\n# This takes a few minutes to run, so go grab a tea or coffee while you wait :)\r\ndata_files = \"https:\/\/the-eye.eu\/public\/AI\/pile_preliminary_components\/PUBMED_title_abstracts_2019_baseline.jsonl.zst\"\r\npubmed_dataset = load_dataset(\"json\", data_files=data_files, split=\"train\")\r\npubmed_dataset\r\n```\r\n\r\nI also tried with `wget` as follows.\r\n```\r\nwget https:\/\/the-eye.eu\/public\/AI\/pile_preliminary_components\/PUBMED_title_abstracts_2019_baseline.jsonl.zst\r\n```\r\n\r\n## Expected results\r\nI expect to be able to download this file.\r\n\r\n## Actual results\r\nTraceback\r\n```\r\n---------------------------------------------------------------------------\r\ntimeout Traceback (most recent 
call last)\r\n\/usr\/lib\/python3\/dist-packages\/urllib3\/connection.py in _new_conn(self)\r\n 158 try:\r\n--> 159 conn = connection.create_connection(\r\n 160 (self._dns_host, self.port), self.timeout, **extra_kw\r\n\r\n\/usr\/lib\/python3\/dist-packages\/urllib3\/util\/connection.py in create_connection(address, timeout, source_address, socket_options)\r\n 83 if err is not None:\r\n---> 84 raise err\r\n 85 \r\n\r\n\/usr\/lib\/python3\/dist-packages\/urllib3\/util\/connection.py in create_connection(address, timeout, source_address, socket_options)\r\n 73 sock.bind(source_address)\r\n---> 74 sock.connect(sa)\r\n 75 return sock\r\n\r\ntimeout: timed out\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nConnectTimeoutError Traceback (most recent call last)\r\n\/usr\/lib\/python3\/dist-packages\/urllib3\/connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\r\n 664 # Make the request on the httplib connection object.\r\n--> 665 httplib_response = self._make_request(\r\n 666 conn,\r\n\r\n\/usr\/lib\/python3\/dist-packages\/urllib3\/connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)\r\n 375 try:\r\n--> 376 self._validate_conn(conn)\r\n 377 except (SocketTimeout, BaseSSLError) as e:\r\n\r\n\/usr\/lib\/python3\/dist-packages\/urllib3\/connectionpool.py in _validate_conn(self, conn)\r\n 995 if not getattr(conn, \"sock\", None): # AppEngine might not have `.sock`\r\n--> 996 conn.connect()\r\n 997 \r\n\r\n\/usr\/lib\/python3\/dist-packages\/urllib3\/connection.py in connect(self)\r\n 313 # Add certificate verification\r\n--> 314 conn = self._new_conn()\r\n 315 hostname = self.host\r\n\r\n\/usr\/lib\/python3\/dist-packages\/urllib3\/connection.py in _new_conn(self)\r\n 163 except SocketTimeout:\r\n--> 164 raise ConnectTimeoutError(\r\n 165 self,\r\n\r\nConnectTimeoutError: (, 'Connection to the-eye.eu timed out. (connect timeout=10.0)')\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nMaxRetryError Traceback (most recent call last)\r\n\/usr\/lib\/python3\/dist-packages\/requests\/adapters.py in send(self, request, stream, timeout, verify, cert, proxies)\r\n 438 if not chunked:\r\n--> 439 resp = conn.urlopen(\r\n 440 method=request.method,\r\n\r\n\/usr\/lib\/python3\/dist-packages\/urllib3\/connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\r\n 718 \r\n--> 719 retries = retries.increment(\r\n 720 method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]\r\n\r\n\/usr\/lib\/python3\/dist-packages\/urllib3\/util\/retry.py in increment(self, method, url, response, error, _pool, _stacktrace)\r\n 435 if new_retry.is_exhausted():\r\n--> 436 raise MaxRetryError(_pool, url, error or ResponseError(cause))\r\n 437 \r\n\r\nMaxRetryError: HTTPSConnectionPool(host='the-eye.eu', port=443): Max retries exceeded with url: \/public\/AI\/pile_preliminary_components\/PUBMED_title_abstracts_2019_baseline.jsonl.zst (Caused by ConnectTimeoutError(, 'Connection to the-eye.eu timed out. 
(connect timeout=10.0)'))\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nConnectTimeout Traceback (most recent call last)\r\n\/tmp\/ipykernel_15104\/606583593.py in \r\n 3 # This takes a few minutes to run, so go grab a tea or coffee while you wait :)\r\n 4 data_files = \"https:\/\/the-eye.eu\/public\/AI\/pile_preliminary_components\/PUBMED_title_abstracts_2019_baseline.jsonl.zst\"\r\n----> 5 pubmed_dataset = load_dataset(\"json\", data_files=data_files, split=\"train\")\r\n 6 pubmed_dataset\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, revision, use_auth_token, task, streaming, script_version, **config_kwargs)\r\n 1655 \r\n 1656 # Create a dataset builder\r\n-> 1657 builder_instance = load_dataset_builder(\r\n 1658 path=path,\r\n 1659 name=name,\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/load.py in load_dataset_builder(path, name, data_dir, data_files, cache_dir, features, download_config, download_mode, revision, use_auth_token, script_version, **config_kwargs)\r\n 1492 download_config = download_config.copy() if download_config else DownloadConfig()\r\n 1493 download_config.use_auth_token = use_auth_token\r\n-> 1494 dataset_module = dataset_module_factory(\r\n 1495 path, revision=revision, download_config=download_config, download_mode=download_mode, data_files=data_files\r\n 1496 )\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/load.py in dataset_module_factory(path, revision, download_config, download_mode, force_local_path, dynamic_modules_path, data_files, **download_kwargs)\r\n 1116 # Try packaged\r\n 1117 if path in _PACKAGED_DATASETS_MODULES:\r\n-> 1118 return PackagedDatasetModuleFactory(\r\n 1119 path, data_files=data_files, download_config=download_config, download_mode=download_mode\r\n 1120 ).get_module()\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/load.py in get_module(self)\r\n 773 else get_patterns_locally(str(Path().resolve()))\r\n 774 )\r\n--> 775 data_files = DataFilesDict.from_local_or_remote(patterns, use_auth_token=self.downnload_config.use_auth_token)\r\n 776 module_path, hash = _PACKAGED_DATASETS_MODULES[self.name]\r\n 777 builder_kwargs = {\"hash\": hash, \"data_files\": data_files}\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/data_files.py in from_local_or_remote(cls, patterns, base_path, allowed_extensions, use_auth_token)\r\n 576 for key, patterns_for_key in patterns.items():\r\n 577 out[key] = (\r\n--> 578 DataFilesList.from_local_or_remote(\r\n 579 patterns_for_key,\r\n 580 base_path=base_path,\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/data_files.py in from_local_or_remote(cls, patterns, base_path, allowed_extensions, use_auth_token)\r\n 545 base_path = base_path if base_path is not None else str(Path().resolve())\r\n 546 data_files = resolve_patterns_locally_or_by_urls(base_path, patterns, allowed_extensions)\r\n--> 547 origin_metadata = _get_origin_metadata_locally_or_by_urls(data_files, use_auth_token=use_auth_token)\r\n 548 return cls(data_files, origin_metadata)\r\n 549 \r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/data_files.py in _get_origin_metadata_locally_or_by_urls(data_files, max_workers, use_auth_token)\r\n 492 data_files: List[Union[Path, Url]], max_workers=64, use_auth_token: Optional[Union[bool, str]] = None\r\n 493 ) -> Tuple[str]:\r\n--> 494 
return thread_map(\r\n 495 partial(_get_single_origin_metadata_locally_or_by_urls, use_auth_token=use_auth_token),\r\n 496 data_files,\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/tqdm\/contrib\/concurrent.py in thread_map(fn, *iterables, **tqdm_kwargs)\r\n 92 \"\"\"\r\n 93 from concurrent.futures import ThreadPoolExecutor\r\n---> 94 return _executor_map(ThreadPoolExecutor, fn, *iterables, **tqdm_kwargs)\r\n 95 \r\n 96 \r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/tqdm\/contrib\/concurrent.py in _executor_map(PoolExecutor, fn, *iterables, **tqdm_kwargs)\r\n 74 map_args.update(chunksize=chunksize)\r\n 75 with PoolExecutor(**pool_kwargs) as ex:\r\n---> 76 return list(tqdm_class(ex.map(fn, *iterables, **map_args), **kwargs))\r\n 77 \r\n 78 \r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/tqdm\/notebook.py in __iter__(self)\r\n 252 def __iter__(self):\r\n 253 try:\r\n--> 254 for obj in super(tqdm_notebook, self).__iter__():\r\n 255 # return super(tqdm...) will not catch exception\r\n 256 yield obj\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/tqdm\/std.py in __iter__(self)\r\n 1171 # (note: keep this check outside the loop for performance)\r\n 1172 if self.disable:\r\n-> 1173 for obj in iterable:\r\n 1174 yield obj\r\n 1175 return\r\n\r\n\/usr\/lib\/python3.8\/concurrent\/futures\/_base.py in result_iterator()\r\n 617 # Careful not to keep a reference to the popped future\r\n 618 if timeout is None:\r\n--> 619 yield fs.pop().result()\r\n 620 else:\r\n 621 yield fs.pop().result(end_time - time.monotonic())\r\n\r\n\/usr\/lib\/python3.8\/concurrent\/futures\/_base.py in result(self, timeout)\r\n 442 raise CancelledError()\r\n 443 elif self._state == FINISHED:\r\n--> 444 return self.__get_result()\r\n 445 else:\r\n 446 raise TimeoutError()\r\n\r\n\/usr\/lib\/python3.8\/concurrent\/futures\/_base.py in __get_result(self)\r\n 387 if self._exception:\r\n 388 try:\r\n--> 389 raise self._exception\r\n 390 finally:\r\n 391 # Break a reference cycle with the exception in self._exception\r\n\r\n\/usr\/lib\/python3.8\/concurrent\/futures\/thread.py in run(self)\r\n 55 \r\n 56 try:\r\n---> 57 result = self.fn(*self.args, **self.kwargs)\r\n 58 except BaseException as exc:\r\n 59 self.future.set_exception(exc)\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/data_files.py in _get_single_origin_metadata_locally_or_by_urls(data_file, use_auth_token)\r\n 483 if isinstance(data_file, Url):\r\n 484 data_file = str(data_file)\r\n--> 485 return (request_etag(data_file, use_auth_token=use_auth_token),)\r\n 486 else:\r\n 487 data_file = str(data_file.resolve())\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py in request_etag(url, use_auth_token)\r\n 489 def request_etag(url: str, use_auth_token: Optional[Union[str, bool]] = None) -> Optional[str]:\r\n 490 headers = get_authentication_headers_for_url(url, use_auth_token=use_auth_token)\r\n--> 491 response = http_head(url, headers=headers, max_retries=3)\r\n 492 response.raise_for_status()\r\n 493 etag = response.headers.get(\"ETag\") if response.ok else None\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py in http_head(url, proxies, headers, cookies, allow_redirects, timeout, max_retries)\r\n 474 headers = copy.deepcopy(headers) or {}\r\n 475 headers[\"user-agent\"] = get_datasets_user_agent(user_agent=headers.get(\"user-agent\"))\r\n--> 476 response = _request_with_retry(\r\n 477 method=\"HEAD\",\r\n 478 
url=url,\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py in _request_with_retry(method, url, max_retries, base_wait_time, max_wait_time, timeout, **params)\r\n 407 except (requests.exceptions.ConnectTimeout, requests.exceptions.ConnectionError) as err:\r\n 408 if tries > max_retries:\r\n--> 409 raise err\r\n 410 else:\r\n 411 logger.info(f\"{method} request to {url} timed out, retrying... [{tries\/max_retries}]\")\r\n\r\n~\/.local\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py in _request_with_retry(method, url, max_retries, base_wait_time, max_wait_time, timeout, **params)\r\n 403 tries += 1\r\n 404 try:\r\n--> 405 response = requests.request(method=method.upper(), url=url, timeout=timeout, **params)\r\n 406 success = True\r\n 407 except (requests.exceptions.ConnectTimeout, requests.exceptions.ConnectionError) as err:\r\n\r\n\/usr\/lib\/python3\/dist-packages\/requests\/api.py in request(method, url, **kwargs)\r\n 58 # cases, and look like a memory leak in others.\r\n 59 with sessions.Session() as session:\r\n---> 60 return session.request(method=method, url=url, **kwargs)\r\n 61 \r\n 62 \r\n\r\n\/usr\/lib\/python3\/dist-packages\/requests\/sessions.py in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)\r\n 531 }\r\n 532 send_kwargs.update(settings)\r\n--> 533 resp = self.send(prep, **send_kwargs)\r\n 534 \r\n 535 return resp\r\n\r\n\/usr\/lib\/python3\/dist-packages\/requests\/sessions.py in send(self, request, **kwargs)\r\n 644 \r\n 645 # Send the request\r\n--> 646 r = adapter.send(request, **kwargs)\r\n 647 \r\n 648 # Total elapsed time of the request (approximately)\r\n\r\n\/usr\/lib\/python3\/dist-packages\/requests\/adapters.py in send(self, request, stream, timeout, verify, cert, proxies)\r\n 502 # TODO: Remove this in 3.0.0: see #2811\r\n 503 if not isinstance(e.reason, NewConnectionError):\r\n--> 504 raise ConnectTimeout(e, request=request)\r\n 505 \r\n 506 if isinstance(e.reason, ResponseError):\r\n\r\nConnectTimeout: HTTPSConnectionPool(host='the-eye.eu', port=443): Max retries exceeded with url: \/public\/AI\/pile_preliminary_components\/PUBMED_title_abstracts_2019_baseline.jsonl.zst (Caused by ConnectTimeoutError(, 'Connection to the-eye.eu timed out. 
(connect timeout=10.0)'))\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.17.0\r\n- Platform: Linux-5.11.0-43-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 6.0.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3504\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3504\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3503","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3503\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3503\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3503\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3503","id":1090472735,"node_id":"I_kwDODunzps5A_0sf","number":3503,"title":"Batched in filter throws error","user":{"login":"gpucce","id":32967787,"node_id":"MDQ6VXNlcjMyOTY3Nzg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32967787?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gpucce","html_url":"https:\/\/github.com\/gpucce","followers_url":"https:\/\/api.github.com\/users\/gpucce\/followers","following_url":"https:\/\/api.github.com\/users\/gpucce\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gpucce\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gpucce\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gpucce\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gpucce\/orgs","repos_url":"https:\/\/api.github.com\/users\/gpucce\/repos","events_url":"https:\/\/api.github.com\/users\/gpucce\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gpucce\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"assignees":[{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-12-29T12:01:04Z","updated_at":"2022-01-04T10:24:27Z","closed_at":"2022-01-04T10:24:27Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I hope this is really a bug, I could not find it among the open issues\r\n\r\n## Describe the bug\r\nusing `batched=False` in DataSet.filter throws error\r\n```python\r\nTypeError: filter() got an unexpected keyword argument 'batched'\r\n```\r\nbut in the docs it is lister as an argument.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ntask = \"mnli\"\r\nmax_length = 128\r\ntokenizer = AutoTokenizer.from_pretrained(\".\/pretrained_models\/pretrained_models_drozd\/sl250.m.gsic.titech.ac.jp:8000\/21.11.17_06.30.32_roberta-base_a0057\/checkpoints\/smpl_400M\/hf\/\")\r\n\r\ndataset = load_dataset(\"glue\", task)\r\n\r\ntask_to_keys = {\r\n \"cola\": (\"sentence\", None),\r\n \"mnli\": (\"premise\", \"hypothesis\"),\r\n \"mnli-mm\": (\"premise\", \"hypothesis\"),\r\n \"mrpc\": (\"sentence1\", \"sentence2\"),\r\n \"qnli\": (\"question\", \"sentence\"),\r\n \"qqp\": (\"question1\", \"question2\"),\r\n \"rte\": (\"sentence1\", \"sentence2\"),\r\n \"sst2\": (\"sentence\", None),\r\n \"stsb\": (\"sentence1\", \"sentence2\"),\r\n \"wnli\": (\"sentence1\", \"sentence2\"),\r\n}\r\n\r\n##### tokenization_parameters\r\nsentence1_key, sentence2_key = task_to_keys[task]\r\ndef preprocess_function(examples, max_length):\r\n if sentence2_key is None:\r\n return tokenizer(\r\n 
examples[sentence1_key], truncation=True, max_length=max_length\r\n )\r\n return tokenizer(\r\n examples[sentence1_key],\r\n examples[sentence2_key],\r\n truncation=False,\r\n padding=\"max_length\",\r\n max_length=max_length,\r\n )\r\n\r\nencoded_dataset = dataset.map(\r\n lambda x: preprocess_function(x, max_length=max_length), batched=False\r\n)\r\n\r\nencoded_dataset.filter(lambda x: len(x['input_ids']) <= max_length, batched=False)\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.16.1, 1.17.0\r\n- Platform: ubuntu\r\n- Python version: 3.8.12\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3503\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3503\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3502","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3502\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3502\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3502\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3502","id":1090438558,"node_id":"PR_kwDODunzps4wXSLi","number":3502,"title":"Add QuALITY","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-29T10:58:46Z","updated_at":"2022-01-10T10:53:52Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3502","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3502","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3502.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3502.patch","merged_at":null},"body":"Fixes #3441.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3502\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3502\/timeline","performed_via_github_app":null} 
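For the batched `filter` issue reported in #3503 and addressed by #3506 above, the sketch below shows what the call is expected to look like once `DatasetDict.filter` forwards the `batched` flag to each split; that forwarding is the assumption here, and the dataset name, tokenizer checkpoint, and length threshold are illustrative rather than taken from the issue.

```python
# Minimal sketch of batched filtering on a DatasetDict, assuming #3506 is in
# (i.e. `DatasetDict.filter` accepts and forwards `batched`).
from datasets import load_dataset
from transformers import AutoTokenizer

max_length = 128
tokenizer = AutoTokenizer.from_pretrained("roberta-base")

dataset = load_dataset("glue", "mnli")  # a DatasetDict of splits

encoded = dataset.map(
    lambda batch: tokenizer(batch["premise"], batch["hypothesis"], truncation=False),
    batched=True,
)

# With batched=True the predicate receives a dict of column lists and must
# return one boolean per example in the batch.
short_enough = encoded.filter(
    lambda batch: [len(ids) <= max_length for ids in batch["input_ids"]],
    batched=True,
)
```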
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3501","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3501\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3501\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3501\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3501","id":1090413758,"node_id":"PR_kwDODunzps4wXM8H","number":3501,"title":"Update pib dataset card","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-29T10:14:40Z","updated_at":"2021-12-29T11:13:21Z","closed_at":"2021-12-29T11:13:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3501","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3501","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3501.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3501.patch","merged_at":"2021-12-29T11:13:21Z"},"body":"Related to #3496","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3501\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3501\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3500","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3500\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3500\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3500\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3500","id":1090406133,"node_id":"PR_kwDODunzps4wXLTB","number":3500,"title":"Docs: Add VCTK dataset 
description","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-29T10:02:05Z","updated_at":"2022-01-04T10:46:02Z","closed_at":"2022-01-04T10:25:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3500","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3500","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3500.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3500.patch","merged_at":"2022-01-04T10:25:09Z"},"body":"This PR is a very minor followup to #1837, with only docs changes (single comment string).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3500\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3500\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3499","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3499\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3499\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3499\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3499","id":1090132618,"node_id":"I_kwDODunzps5A-hqK","number":3499,"title":"Adjusting chunk size for streaming 
datasets","user":{"login":"JoelNiklaus","id":3775944,"node_id":"MDQ6VXNlcjM3NzU5NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3775944?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JoelNiklaus","html_url":"https:\/\/github.com\/JoelNiklaus","followers_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/followers","following_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/orgs","repos_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/repos","events_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-28T21:17:53Z","updated_at":"2022-01-07T15:39:21Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nI want to use mc4 which I cannot save locally, so I stream it. However, I want to process the entire dataset and filter some documents from it. With the current chunk size of around 1000 documents (right?) I hit a performance bottleneck because of the frequent decompressing. \r\n\r\n**Describe the solution you'd like**\r\nI would appreciate a parameter in the load_dataset function, that allows me to set the chunksize myself (to a value like 100'000 in my case). 
Like that, I hope to improve the processing time.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3499\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3499\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3498","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3498\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3498\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3498\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3498","id":1090096332,"node_id":"PR_kwDODunzps4wWL5U","number":3498,"title":"update `pretty_name` for first 200 datasets","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-28T19:50:07Z","updated_at":"2022-01-05T18:48:23Z","closed_at":"2022-01-05T16:38:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3498","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3498","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3498.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3498.patch","merged_at":"2022-01-05T16:38:21Z"},"body":"I made a script some time back to fetch `pretty_names` from `papers_with_code` dataset along with some other rules incase that dataset wasn't available on `papers_with_code`. Updating them in the `README` of `datasets`. Took only the first 200 datasets into consideration and after some eyeballing, most of them were looking good to me! 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3498\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3498\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3497","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3497\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3497\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3497\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3497","id":1090050148,"node_id":"I_kwDODunzps5A-Nhk","number":3497,"title":"Changing sampling rate in audio dataset and subsequently mapping with `num_proc > 1` leads to weird bug","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-12-28T18:03:49Z","updated_at":"2022-01-21T13:22:27Z","closed_at":"2022-01-21T13:22:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Running: \r\n\r\n```python\r\nfrom datasets import load_dataset, DatasetDict\r\nimport datasets\r\nfrom transformers import AutoFeatureExtractor\r\n\r\nraw_datasets = DatasetDict()\r\n\r\nraw_datasets[\"train\"] = load_dataset(\"common_voice\", \"ab\", split=\"train\")\r\n\r\nfeature_extractor = AutoFeatureExtractor.from_pretrained(\"facebook\/wav2vec2-base\")\r\n\r\nraw_datasets = raw_datasets.cast_column(\r\n \"audio\", datasets.features.Audio(sampling_rate=feature_extractor.sampling_rate)\r\n)\r\n\r\nnum_workers = 16\r\n\r\ndef prepare_dataset(batch):\r\n sample = batch[\"audio\"]\r\n\r\n inputs = feature_extractor(sample[\"array\"], sampling_rate=sample[\"sampling_rate\"])\r\n batch[\"input_values\"] = inputs.input_values[0]\r\n batch[\"input_length\"] = len(batch[\"input_values\"])\r\n return batch\r\n\r\nraw_datasets.map(\r\n prepare_dataset,\r\n remove_columns=next(iter(raw_datasets.values())).column_names,\r\n num_proc=16,\r\n desc=\"preprocess datasets\",\r\n)\r\n```\r\n\r\ngives\r\n\r\n```bash\r\n File \"\/home\/patrick\/experiments\/run_bug.py\", line 25, in \r\n raw_datasets.map(\r\n File \"\/home\/patrick\/python_bin\/datasets\/dataset_dict.py\", line 492, in map\r\n {\r\n File \"\/home\/patrick\/python_bin\/datasets\/dataset_dict.py\", line 493, in \r\n k: dataset.map(\r\n File 
\"\/home\/patrick\/python_bin\/datasets\/arrow_dataset.py\", line 2139, in map\r\n shards = [\r\n File \"\/home\/patrick\/python_bin\/datasets\/arrow_dataset.py\", line 2140, in \r\n self.shard(num_shards=num_proc, index=rank, contiguous=True, keep_in_memory=keep_in_memory)\r\n File \"\/home\/patrick\/python_bin\/datasets\/arrow_dataset.py\", line 3164, in shard\r\n return self.select(\r\n File \"\/home\/patrick\/python_bin\/datasets\/arrow_dataset.py\", line 485, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/home\/patrick\/python_bin\/datasets\/fingerprint.py\", line 411, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/home\/patrick\/python_bin\/datasets\/arrow_dataset.py\", line 2756, in select\r\n return self._new_dataset_with_indices(indices_buffer=buf_writer.getvalue(), fingerprint=new_fingerprint)\r\n File \"\/home\/patrick\/python_bin\/datasets\/arrow_dataset.py\", line 2667, in _new_dataset_with_indices\r\n return Dataset(\r\n File \"\/home\/patrick\/python_bin\/datasets\/arrow_dataset.py\", line 659, in __init__\r\n raise ValueError(\r\nValueError: External features info don't match the dataset:\r\nGot\r\n{'client_id': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'audio': Audio(sampling_rate=16000, mono=True, _storage_dtype='string', id=None), 'sentence': Value(dtype='string', id=None), 'up_votes': Value(dtype='int64', id=None), 'down_votes': Value(dtype='int64', id=None), 'age': Value(dtype='string', id=None), 'gender': Value(dtype='string', id=None), 'accent': Value(dtype='string', id=None), 'locale': Value(dtype='string', id=None), 'segment': Value(dtype='string', id=None)}\r\nwith type\r\nstruct\r\n\r\nbut expected something like\r\n{'client_id': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'audio': {'path': Value(dtype='string', id=None), 'bytes': Value(dtype='binary', id=None)}, 'sentence': Value(dtype='string', id=None), 'up_votes': Value(dtype='int64', id=None), 'down_votes': Value(dtype='int64', id=None), 'age': Value(dtype='string', id=None), 'gender': Value(dtype='string', id=None), 'accent': Value(dtype='string', id=None), 'locale': Value(dtype='string', id=None), 'segment': Value(dtype='string', id=None)}\r\nwith type\r\nstruct, sentence: string, up_votes: int64, down_votes: int64, age: string, gender: string, accent: string, locale: string, segment: string>\r\n```\r\n\r\nVersions:\r\n\r\n```python\r\n- `datasets` version: 1.16.2.dev0\r\n- Platform: Linux-5.15.8-76051508-generic-x86_64-with-glibc2.33\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.1\r\n```\r\n\r\nand `transformers`:\r\n\r\n```\r\n- `transformers` version: 4.16.0.dev0\r\n- Platform: Linux-5.15.8-76051508-generic-x86_64-with-glibc2.33\r\n- Python version: 3.9.7\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3497\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3497\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3496","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3496\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3496\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3496\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3496","id":1089989155,"node_id":"PR_kwDODunzps4wV1_w","number":3496,"title":"Update version of pib dataset and make it streamable","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-12-28T16:01:55Z","updated_at":"2022-01-03T14:42:28Z","closed_at":"2021-12-29T08:42:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3496","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3496","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3496.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3496.patch","merged_at":"2021-12-29T08:42:57Z"},"body":"This PR:\r\n- Updates version of pib dataset: from 0.0.0 to 1.3.0\r\n- Makes the dataset streamable\r\n\r\nFix #3491.\r\n\r\nCC: @severo ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3496\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3496\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3495","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3495\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3495\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3495\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3495","id":1089983632,"node_id":"I_kwDODunzps5A99SQ","number":3495,"title":"Add 
VoxLingua107","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-28T15:51:43Z","updated_at":"2021-12-28T15:51:43Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** VoxLingua107\r\n- **Description:** VoxLingua107 is a speech dataset for training spoken language identification models. \r\n- **Paper:** https:\/\/arxiv.org\/abs\/2011.12998\r\n- **Data:** http:\/\/bark.phon.ioc.ee\/voxlingua107\/\r\n- **Motivation:** 107 languages, totaling 6628 hours for the train split.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3495\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3495\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3494","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3494\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3494\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3494\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3494","id":1089983103,"node_id":"PR_kwDODunzps4wV0vB","number":3494,"title":"Clone full repo to detect new tags when mirroring datasets on the 
Hub","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-28T15:50:47Z","updated_at":"2021-12-28T16:07:21Z","closed_at":"2021-12-28T16:07:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3494","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3494","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3494.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3494.patch","merged_at":"2021-12-28T16:07:20Z"},"body":"The new releases of `datasets` were not detected because the shallow clone in the CI wasn't getting the git tags.\r\n\r\nBy cloning the full repository we can properly detect a new release, and tag all the dataset repositories accordingly\r\ncc @SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3494\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3494\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3493","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3493\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3493\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3493\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3493","id":1089967286,"node_id":"PR_kwDODunzps4wVxfr","number":3493,"title":"Fix VCTK 
encoding","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-28T15:23:36Z","updated_at":"2021-12-28T15:48:18Z","closed_at":"2021-12-28T15:48:17Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3493","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3493","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3493.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3493.patch","merged_at":"2021-12-28T15:48:17Z"},"body":"utf-8 encoding was missing in the VCTK dataset builder added in #3351 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3493\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3493\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3492","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3492\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3492\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3492\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3492","id":1089952943,"node_id":"PR_kwDODunzps4wVufr","number":3492,"title":"Add `gzip` for 
`to_json`","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-28T15:01:11Z","updated_at":"2022-01-05T13:03:36Z","closed_at":"2022-01-05T13:03:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3492","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3492","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3492.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3492.patch","merged_at":"2022-01-05T13:03:35Z"},"body":"(Partially) closes #3480. I have added `gzip` compression for `to_json`. I realised we can run into this compression problem with `to_csv` as well. `IOHandler` can be used for `to_csv` too. Please let me know if any changes are required. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3492\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3492\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3491","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3491\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3491\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3491\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3491","id":1089918018,"node_id":"I_kwDODunzps5A9tRC","number":3491,"title":"Update version of pib dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-12-28T14:03:58Z","updated_at":"2021-12-29T08:42:57Z","closed_at":"2021-12-29T08:42:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"On the Hub we have v0, while there exists v1.3.\r\n\r\nRelated to bigscience-workshop\/data_tooling#130\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3491\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3491\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3490","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3490\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3490\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3490\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3490","id":1089730181,"node_id":"I_kwDODunzps5A8_aF","number":3490,"title":"Does datasets support load text from 
HDFS?","user":{"login":"dancingpipi","id":20511825,"node_id":"MDQ6VXNlcjIwNTExODI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20511825?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dancingpipi","html_url":"https:\/\/github.com\/dancingpipi","followers_url":"https:\/\/api.github.com\/users\/dancingpipi\/followers","following_url":"https:\/\/api.github.com\/users\/dancingpipi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dancingpipi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dancingpipi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dancingpipi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dancingpipi\/orgs","repos_url":"https:\/\/api.github.com\/users\/dancingpipi\/repos","events_url":"https:\/\/api.github.com\/users\/dancingpipi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dancingpipi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-28T08:56:02Z","updated_at":"2022-01-05T15:10:11Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The raw text data is stored on HDFS due to the dataset's size is too large to store on my develop machine, \r\nso I wander does datasets support read data from hdfs?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3490\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3490\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3489","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3489\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3489\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3489\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3489","id":1089401926,"node_id":"PR_kwDODunzps4wT97d","number":3489,"title":"Avoid unnecessary list 
creations","user":{"login":"bryant1410","id":3905501,"node_id":"MDQ6VXNlcjM5MDU1MDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3905501?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bryant1410","html_url":"https:\/\/github.com\/bryant1410","followers_url":"https:\/\/api.github.com\/users\/bryant1410\/followers","following_url":"https:\/\/api.github.com\/users\/bryant1410\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bryant1410\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bryant1410\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bryant1410\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bryant1410\/orgs","repos_url":"https:\/\/api.github.com\/users\/bryant1410\/repos","events_url":"https:\/\/api.github.com\/users\/bryant1410\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bryant1410\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-27T18:20:56Z","updated_at":"2022-01-04T18:19:31Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3489","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3489","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3489.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3489.patch","merged_at":null},"body":"Like in `join([... for s in ...])`. Also changed other things that I saw:\r\n* Use a `with` statement for many `open` that missed them, so the files don't remain open.\r\n* Remove unused variables.\r\n* Many HTTP links converted into HTTPS (verified).\r\n* Remove unnecessary \"r\" mode arg in `open` (double-checked it was actually the default in each case).\r\n* Remove Python 2 style of using `super`.\r\n* Run `pyupgrade $(find . -name \"*.py\" -type f) --py36-plus` (which already does some of the previous points).\r\n* Run `dos2unix $(find . 
-name \"*.py\" -type f)` (CRLF to LF line endings).\r\n* Fix typos.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3489\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3489\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3488","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3488\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3488\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3488\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3488","id":1089345653,"node_id":"I_kwDODunzps5A7hh1","number":3488,"title":"URL query parameters are set as path in the compression hop for fsspec","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-27T16:29:00Z","updated_at":"2022-01-05T15:15:25Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThere is an ssue with `StreamingDownloadManager._extract`.\r\n\r\nI don't know how the test `test_streaming_gg_drive_gzipped` passes:\r\n\r\nFor \r\n```python\r\nTEST_GG_DRIVE_GZIPPED_URL = \"https:\/\/drive.google.com\/uc?export=download&id=1Bt4Garpf0QLiwkJhHJzXaVa0I0H5Qhwz\"\r\nurlpath = StreamingDownloadManager().download_and_extract(TEST_GG_DRIVE_GZIPPED_URL)\r\n```\r\ngives `urlpath`:\r\n```python \r\n'gzip:\/\/uc?export=download&id=1Bt4Garpf0QLiwkJhHJzXaVa0I0H5Qhwz::https:\/\/drive.google.com\/uc?export=download&id=1Bt4Garpf0QLiwkJhHJzXaVa0I0H5Qhwz'\r\n```\r\n\r\nThe gzip path makes no sense: `gzip:\/\/uc?export=download&id=1Bt4Garpf0QLiwkJhHJzXaVa0I0H5Qhwz`\r\n\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets.utils.streaming_download_manager import StreamingDownloadManager\r\n\r\ndl_manager = StreamingDownloadManager()\r\nurlpath = 
dl_manager.extract(\"https:\/\/drive.google.com\/uc?export=download&id=1Bt4Garpf0QLiwkJhHJzXaVa0I0H5Qhwz\")\r\nprint(urlpath)\r\n```\r\n\r\n## Expected results\r\nThe query parameters should not be set as part of the path.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3488\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3488\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3487","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3487\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3487\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3487\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3487","id":1089209031,"node_id":"PR_kwDODunzps4wTVeN","number":3487,"title":"Update ADD_NEW_DATASET.md","user":{"login":"apergo-ai","id":68908804,"node_id":"MDQ6VXNlcjY4OTA4ODA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68908804?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/apergo-ai","html_url":"https:\/\/github.com\/apergo-ai","followers_url":"https:\/\/api.github.com\/users\/apergo-ai\/followers","following_url":"https:\/\/api.github.com\/users\/apergo-ai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/apergo-ai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/apergo-ai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/apergo-ai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/apergo-ai\/orgs","repos_url":"https:\/\/api.github.com\/users\/apergo-ai\/repos","events_url":"https:\/\/api.github.com\/users\/apergo-ai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/apergo-ai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-27T12:24:51Z","updated_at":"2021-12-27T15:00:45Z","closed_at":"2021-12-27T15:00:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3487","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3487","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3487.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3487.patch","merged_at":"2021-12-27T15:00:45Z"},"body":"fixed make style prompt for Windows Terminal","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3487\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3487\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3486","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3486\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3486\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3486\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3486","id":1089171551,"node_id":"PR_kwDODunzps4wTNd1","number":3486,"title":"Fix weird spacing in ManualDownloadError message","user":{"login":"bryant1410","id":3905501,"node_id":"MDQ6VXNlcjM5MDU1MDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3905501?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bryant1410","html_url":"https:\/\/github.com\/bryant1410","followers_url":"https:\/\/api.github.com\/users\/bryant1410\/followers","following_url":"https:\/\/api.github.com\/users\/bryant1410\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bryant1410\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bryant1410\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bryant1410\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bryant1410\/orgs","repos_url":"https:\/\/api.github.com\/users\/bryant1410\/repos","events_url":"https:\/\/api.github.com\/users\/bryant1410\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bryant1410\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-27T11:20:36Z","updated_at":"2021-12-28T09:03:26Z","closed_at":"2021-12-28T09:00:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3486","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3486","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3486.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3486.patch","merged_at":"2021-12-28T09:00:28Z"},"body":"`textwrap.dedent` works based on the spaces at the beginning. 
Before this change, there wasn't any space.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3486\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3486\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3485","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3485\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3485\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3485\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3485","id":1089027581,"node_id":"I_kwDODunzps5A6T39","number":3485,"title":"skip columns which cannot set to specific format when set_format","user":{"login":"tshu-w","id":13161779,"node_id":"MDQ6VXNlcjEzMTYxNzc5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13161779?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tshu-w","html_url":"https:\/\/github.com\/tshu-w","followers_url":"https:\/\/api.github.com\/users\/tshu-w\/followers","following_url":"https:\/\/api.github.com\/users\/tshu-w\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tshu-w\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tshu-w\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tshu-w\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tshu-w\/orgs","repos_url":"https:\/\/api.github.com\/users\/tshu-w\/repos","events_url":"https:\/\/api.github.com\/users\/tshu-w\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tshu-w\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-27T07:19:55Z","updated_at":"2021-12-27T09:07:07Z","closed_at":"2021-12-27T09:07:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nWhen using `dataset.set_format(\"torch\")`, I must make sure every columns in datasets can convert to `torch`, however, sometimes I want to keep some string columns.\r\n\r\n**Describe the solution you'd like**\r\nskip columns which cannot set to specific format when set_format instead of raise an error.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3485\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3485\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3484","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3484\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3484\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3484\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3484","id":1088910402,"node_id":"I_kwDODunzps5A53RC","number":3484,"title":"make shape verification to use ArrayXD instead of nested lists for map","user":{"login":"tshu-w","id":13161779,"node_id":"MDQ6VXNlcjEzMTYxNzc5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13161779?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tshu-w","html_url":"https:\/\/github.com\/tshu-w","followers_url":"https:\/\/api.github.com\/users\/tshu-w\/followers","following_url":"https:\/\/api.github.com\/users\/tshu-w\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tshu-w\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tshu-w\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tshu-w\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tshu-w\/orgs","repos_url":"https:\/\/api.github.com\/users\/tshu-w\/repos","events_url":"https:\/\/api.github.com\/users\/tshu-w\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tshu-w\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-27T02:16:02Z","updated_at":"2022-01-05T13:54:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As describe in https:\/\/github.com\/huggingface\/datasets\/issues\/2005#issuecomment-793716753 and mentioned by @mariosasko in [image feature example](https:\/\/colab.research.google.com\/drive\/1mIrTnqTVkWLJWoBzT1ABSe-LFelIep1c#scrollTo=ow3XHDvf2I0B&line=1&uniqifier=1), IMO make shape verifcaiton to use ArrayXD instead of nested lists for map can help user reduce unnecessary cast. 
I notice datasets have done something special for `input_ids` and `attention_mask` which is also unnecessary after this feature added.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3484\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3484\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3483","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3483\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3483\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3483\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3483","id":1088784157,"node_id":"PR_kwDODunzps4wSAW4","number":3483,"title":"Remove unused phony rule from Makefile","user":{"login":"bryant1410","id":3905501,"node_id":"MDQ6VXNlcjM5MDU1MDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3905501?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bryant1410","html_url":"https:\/\/github.com\/bryant1410","followers_url":"https:\/\/api.github.com\/users\/bryant1410\/followers","following_url":"https:\/\/api.github.com\/users\/bryant1410\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bryant1410\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bryant1410\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bryant1410\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bryant1410\/orgs","repos_url":"https:\/\/api.github.com\/users\/bryant1410\/repos","events_url":"https:\/\/api.github.com\/users\/bryant1410\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bryant1410\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-26T14:37:13Z","updated_at":"2022-01-05T19:44:56Z","closed_at":"2022-01-05T16:34:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3483","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3483","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3483.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3483.patch","merged_at":"2022-01-05T16:34:12Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3483\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3483\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3482","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3482\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3482\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3482\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3482","id":1088317921,"node_id":"PR_kwDODunzps4wQqE1","number":3482,"title":"Fix duplicate keys in NewsQA","user":{"login":"bryant1410","id":3905501,"node_id":"MDQ6VXNlcjM5MDU1MDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3905501?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bryant1410","html_url":"https:\/\/github.com\/bryant1410","followers_url":"https:\/\/api.github.com\/users\/bryant1410\/followers","following_url":"https:\/\/api.github.com\/users\/bryant1410\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bryant1410\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bryant1410\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bryant1410\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bryant1410\/orgs","repos_url":"https:\/\/api.github.com\/users\/bryant1410\/repos","events_url":"https:\/\/api.github.com\/users\/bryant1410\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bryant1410\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-24T11:01:59Z","updated_at":"2021-12-26T16:29:53Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3482","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3482","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3482.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3482.patch","merged_at":null},"body":"* Fix duplicate keys in NewsQA when loading from CSV files.\r\n* Fix s\/narqa\/newsqa\/ in the download manually error message.\r\n* Make the download manually error message show nicely when printed. 
Otherwise, is hard to read due to spacing issues.\r\n* Fix the format of the license text.\r\n* Reformat the code to make it simpler.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3482\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3482\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3481","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3481\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3481\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3481\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3481","id":1088308343,"node_id":"PR_kwDODunzps4wQoJu","number":3481,"title":"Fix overriding of filesystem info","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-24T10:42:31Z","updated_at":"2021-12-24T11:08:59Z","closed_at":"2021-12-24T11:08:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3481","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3481","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3481.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3481.patch","merged_at":"2021-12-24T11:08:59Z"},"body":"Previously, `BaseCompressedFileFileSystem.info` was overridden and transformed from function to dict.\r\n\r\nThis generated a bug for filesystem methods that use `self.info()`, like e.g. 
`fs.isfile()`.\r\n\r\nThis PR:\r\n- Adds tests for `fs.isfile` (that use `fs.info`).\r\n- Fixes custom `BaseCompressedFileFileSystem.info` by removing its overriding.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3481\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3481\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3480","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3480\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3480\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3480\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3480","id":1088267110,"node_id":"I_kwDODunzps5A3aNm","number":3480,"title":"the compression format requested when saving a dataset in json format is not respected","user":{"login":"SaulLu","id":55560583,"node_id":"MDQ6VXNlcjU1NTYwNTgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55560583?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SaulLu","html_url":"https:\/\/github.com\/SaulLu","followers_url":"https:\/\/api.github.com\/users\/SaulLu\/followers","following_url":"https:\/\/api.github.com\/users\/SaulLu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SaulLu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SaulLu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SaulLu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SaulLu\/orgs","repos_url":"https:\/\/api.github.com\/users\/SaulLu\/repos","events_url":"https:\/\/api.github.com\/users\/SaulLu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SaulLu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-12-24T09:23:51Z","updated_at":"2022-01-05T13:03:35Z","closed_at":"2022-01-05T13:03:35Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIn the documentation of the `to_json` method, it is stated in the parameters that \r\n> **to_json_kwargs \u2013 Parameters to pass to pandas\u2019s pandas.DataFrame.to_json.\r\n\r\nhowever when we pass for example `compression=\"gzip\"`, the saved file is not compressed.\r\n\r\nWould you also have expected compression to be applied? 
:relaxed: \r\n\r\n## Steps to reproduce the bug\r\n```python\r\nmy_dict = {\"a\": [1, 2, 3], \"b\": [1, 2, 3]}\r\n```\r\n### Result with datasets\r\n```python\r\nfrom datasets import Dataset\r\n\r\ndataset = Dataset.from_dict(my_dict)\r\ndataset.to_json(\"dic_with_datasets.jsonl.gz\", compression=\"gzip\")\r\n!cat dic_with_datasets.jsonl.gz\r\n```\r\noutput\r\n```\r\n{\"a\":1,\"b\":1}\r\n{\"a\":2,\"b\":2}\r\n{\"a\":3,\"b\":3}\r\n```\r\nNote: I would expected to see binary data here\r\n\r\n### Result with pandas\r\n```python\r\nimport pandas as pd\r\n\r\ndf = pd.DataFrame(my_dict)\r\ndf.to_json(\"dic_with_pandas.jsonl.gz\", lines=True, orient=\"records\", compression=\"gzip\")\r\n!cat dic_with_pandas.jsonl.gz\r\n```\r\noutput\r\n```\r\n4\ufffd\ufffda\u0002\ufffddic_with_pandas.jsonl\ufffd\ufffdVJT\ufffd2\ufffdQJ\u0002\ufffd\ufffd\\\ufffd \ufffd\u0011\ufffdg\u0004\ufffd\u0019\ufffdy\u01b5\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\u0003\ufffd\ufffd\u000e\ufffd)\ufffd\ufffd\ufffd\r\n```\r\nNote: It looks like binary data\r\n\r\n## Expected results\r\n\r\nI would have expected that the saved result with datasets would also be a binary file\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.16.1\r\n- Platform: Linux-4.18.0-193.70.1.el8_2.x86_64-x86_64-with-glibc2.17\r\n- Python version: 3.8.11\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3480\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3480\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3479","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3479\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3479\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3479\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3479","id":1088232880,"node_id":"I_kwDODunzps5A3R2w","number":3479,"title":"Dataset preview is not available (I think for all Hugging Face datasets)","user":{"login":"Abirate","id":66887439,"node_id":"MDQ6VXNlcjY2ODg3NDM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/66887439?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Abirate","html_url":"https:\/\/github.com\/Abirate","followers_url":"https:\/\/api.github.com\/users\/Abirate\/followers","following_url":"https:\/\/api.github.com\/users\/Abirate\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Abirate\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Abirate\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Abirate\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Abirate\/orgs","repos_url":"https:\/\/api.github.com\/users\/Abirate\/repos","events_url":"https:\/\/api.github.com\/users\/Abirate\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Abirate\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"assignees":[{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-12-24T08:18:48Z","updated_at":"2021-12-24T14:27:46Z","closed_at":"2021-12-24T14:27:46Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*french_book_reviews*'\r\n\r\n**Link:** https:\/\/huggingface.co\/datasets\/Abirate\/french_book_reviews\r\n\r\n**short description of the issue**\r\nFor my dataset, the dataset preview is no longer functional (it used to work: The dataset had been added the day before and it was fine...) \r\nAnd, after looking over the datasets, I discovered that this issue affects all Hugging Face datasets (as of yesterday, December 23, 2021, around 10 p.m. (CET)). 
\r\n**Am I the one who added this dataset** : Yes \r\n\r\n**Note**: here a screenshot showing the issue\r\n![Dataset preview is not available for my dataset](https:\/\/user-images.githubusercontent.com\/66887439\/147333078-60734578-420d-4e91-8691-a90afeaa8948.jpg) \r\n\r\n**And here for glue dataset :** \r\n\r\n![Dataset preview is not available for other Hugging Face datasets(glue)](https:\/\/user-images.githubusercontent.com\/66887439\/147333492-26fa530c-befd-4992-8361-70c51397a25a.jpg)\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3479\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3479\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3478","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3478\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3478\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3478\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3478","id":1087860180,"node_id":"PR_kwDODunzps4wPMWq","number":3478,"title":"Extend support for streaming datasets that use os.walk","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-23T16:42:55Z","updated_at":"2021-12-24T10:50:20Z","closed_at":"2021-12-24T10:50:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3478","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3478","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3478.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3478.patch","merged_at":"2021-12-24T10:50:19Z"},"body":"This PR extends the support in streaming mode for datasets that use `os.walk`, by patching that function.\r\n\r\nThis PR adds support for streaming mode to datasets:\r\n1. autshumato\r\n1. code_x_glue_cd_code_to_text\r\n1. code_x_glue_tc_nl_code_search_adv\r\n1. 
nchlt\r\n\r\nCC: @severo ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3478\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3478\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3477","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3477\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3477\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3477\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3477","id":1087850253,"node_id":"PR_kwDODunzps4wPKPX","number":3477,"title":"Use `iter_files` instead of `str(Path(...)` in image dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-12-23T16:26:55Z","updated_at":"2021-12-28T15:15:02Z","closed_at":"2021-12-28T15:15:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3477","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3477","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3477.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3477.patch","merged_at":"2021-12-28T15:15:02Z"},"body":"Use `iter_files` in the `beans` and the `cats_vs_dogs` dataset scripts as suggested by @albertvillanova.\r\n\r\nAdditional changes:\r\n* Fix `iter_files` in `MockDownloadManager` (see this [CI error](https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/9247\/workflows\/2657ff8a-b531-4fd9-a9fc-6541a72e8d83\/jobs\/57028))\r\n* Add support for `os.path.isdir` and `os.path.isfile` in streaming (`os.path.isfile` is needed in `StreamingDownloadManager`'s `iter_files` to make `cats_vs_dogs` streamable)\r\n\r\nTODO:\r\n- [ ] add tests for `xisdir` and `xisfile`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3477\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3477\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3476","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3476\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3476\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3476\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3476","id":1087622872,"node_id":"PR_kwDODunzps4wOZ8a","number":3476,"title":"Extend support for streaming datasets that use ET.parse","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-23T11:18:46Z","updated_at":"2021-12-23T15:34:30Z","closed_at":"2021-12-23T15:34:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3476","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3476","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3476.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3476.patch","merged_at":"2021-12-23T15:34:30Z"},"body":"This PR extends the support in streaming mode for datasets that use `ET.parse`, by patching the function.\r\n\r\nThis PR adds support for streaming mode to datasets:\r\n1. ami\r\n1. assin\r\n1. assin2\r\n1. counter\r\n1. enriched_web_nlg\r\n1. europarl_bilingual\r\n1. hyperpartisan_news_detection\r\n1. polsum\r\n1. qa4mre\r\n1. quail\r\n1. ted_talks_iwslt\r\n1. udhr\r\n1. web_nlg\r\n1. 
winograd_wsc\r\n\r\nCC: @severo","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3476\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3476\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3475","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3475\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3475\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3475\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3475","id":1087352041,"node_id":"I_kwDODunzps5Az6zp","number":3475,"title":"The rotten_tomatoes dataset of movie reviews contains some reviews in Spanish ","user":{"login":"puzzler10","id":17426779,"node_id":"MDQ6VXNlcjE3NDI2Nzc5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17426779?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/puzzler10","html_url":"https:\/\/github.com\/puzzler10","followers_url":"https:\/\/api.github.com\/users\/puzzler10\/followers","following_url":"https:\/\/api.github.com\/users\/puzzler10\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/puzzler10\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/puzzler10\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/puzzler10\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/puzzler10\/orgs","repos_url":"https:\/\/api.github.com\/users\/puzzler10\/repos","events_url":"https:\/\/api.github.com\/users\/puzzler10\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/puzzler10\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-23T03:56:43Z","updated_at":"2021-12-24T00:23:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nSee title. I don't think this is intentional and they probably should be removed. If they stay the dataset description should be at least updated to make it clear to the user. \r\n\r\n## Steps to reproduce the bug\r\nGo to the [dataset viewer](https:\/\/huggingface.co\/datasets\/viewer\/?dataset=rotten_tomatoes) for the dataset, set the offset to 4160 for the train dataset, and scroll through the results. I found ones at index 4166 and 4173. There's others too (e.g. index 2888) but those two are easy to find like that. \r\n\r\n## Expected results\r\nEnglish movie reviews only. 
\r\n\r\n## Actual results\r\nExample of a Spanish movie review (4173): \r\n> \"\u00c9 uma pena que , mais tarde , o pr\u00f3prio filme abandone o tom de par\u00f3dia e passe a utilizar os mesmos clich\u00eas que havia satirizado \"\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3475\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3475\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3474","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3474\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3474\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3474\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3474","id":1086945384,"node_id":"PR_kwDODunzps4wMMt0","number":3474,"title":"Decode images when iterating","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-22T15:34:49Z","updated_at":"2021-12-28T16:08:10Z","closed_at":"2021-12-28T16:08:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3474","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3474","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3474.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3474.patch","merged_at":null},"body":"If I iterate over a vision dataset, the images are not decoded, and the dictionary with the bytes is returned.\r\n\r\nThis PR enables image decoding in `Dataset.__iter__`\r\n\r\nClose https:\/\/github.com\/huggingface\/datasets\/issues\/3473","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3474\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3474\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3473","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3473\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3473\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3473\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3473","id":1086937610,"node_id":"I_kwDODunzps5AyVoK","number":3473,"title":"Iterating over a vision dataset doesn't decode the images","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-12-22T15:26:32Z","updated_at":"2021-12-27T14:13:21Z","closed_at":"2021-12-23T15:21:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nIf I load `mnist` and I iterate over the dataset, the images are not decoded, and the dictionary with the bytes is returned.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nimport PIL\r\n\r\nmnist = load_dataset(\"mnist\", split=\"train\")\r\n\r\nfirst_image = mnist[0][\"image\"]\r\nassert isinstance(first_image, PIL.PngImagePlugin.PngImageFile) # passes\r\nfirst_image = next(iter(mnist))[\"image\"]\r\nassert isinstance(first_image, PIL.PngImagePlugin.PngImageFile) # fails\r\n```\r\n\r\n## Expected results\r\n\r\nThe image should be decoded, as a PIL Image\r\n\r\n## Actual results\r\n\r\nWe get a dictionary\r\n```\r\n{'bytes': b'\\x89PNG\\r\\n\\x1a\\n\\x00..., 'path': None}\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.17.1.dev0\r\n- Platform: Darwin-20.6.0-x86_64-i386-64bit\r\n- Python version: 3.7.2\r\n- PyArrow version: 6.0.0\r\n\r\nThe bug also exists in 1.17.0\r\n\r\n## Investigation\r\n\r\nI think the issue is that decoding is disabled in `__iter__`:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/dfe5b73387c5e27de6a16b0caeb39d3b9ded66d6\/src\/datasets\/arrow_dataset.py#L1651-L1661\r\n\r\nDo you 
remember why it was disabled in the first place @albertvillanova ?\r\nAlso cc @mariosasko @NielsRogge \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3473\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3473\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3472","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3472\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3472\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3472\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3472","id":1086908508,"node_id":"PR_kwDODunzps4wMEwA","number":3472,"title":"Fix `str(Path(...))` conversion in streaming on Linux","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-22T15:06:03Z","updated_at":"2021-12-22T16:52:53Z","closed_at":"2021-12-22T16:52:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3472","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3472","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3472.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3472.patch","merged_at":"2021-12-22T16:52:52Z"},"body":"Fix `str(Path(...))` conversion in streaming on Linux. 
This should fix the streaming of the `beans` and `cats_vs_dogs` datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3472\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3472\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3471","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3471\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3471\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3471\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3471","id":1086588074,"node_id":"PR_kwDODunzps4wLAk6","number":3471,"title":"Fix Tashkeela dataset to yield stripped text","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-22T08:41:30Z","updated_at":"2021-12-22T10:12:08Z","closed_at":"2021-12-22T10:12:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3471","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3471","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3471.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3471.patch","merged_at":"2021-12-22T10:12:07Z"},"body":"This PR:\r\n- Yields stripped text\r\n- Fix path for Windows\r\n- Adds license\r\n- Adds more info in dataset card\r\n\r\nClose bigscience-workshop\/data_tooling#279","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3471\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3471\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3470","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3470\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3470\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3470\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3470","id":1086049888,"node_id":"PR_kwDODunzps4wJO8t","number":3470,"title":"Fix rendering of docs","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-21T17:17:01Z","updated_at":"2021-12-22T09:23:47Z","closed_at":"2021-12-22T09:23:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3470","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3470","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3470.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3470.patch","merged_at":"2021-12-22T09:23:47Z"},"body":"Minor fix in docs.\r\n\r\nCurrently, `ClassLabel` docstring rendering is not right.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3470\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3470\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3469","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3469\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3469\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3469\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3469","id":1085882664,"node_id":"PR_kwDODunzps4wIrOV","number":3469,"title":"Fix METEOR missing NLTK's 
omw-1.4","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-21T14:19:11Z","updated_at":"2021-12-21T14:52:28Z","closed_at":"2021-12-21T14:49:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3469","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3469","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3469.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3469.patch","merged_at":"2021-12-21T14:49:28Z"},"body":"NLTK 3.6.6 now requires `omw-1.4` to be downloaded for METEOR to work.\r\nThis should fix the CI on master","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3469\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3469\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3468","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3468\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3468\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3468\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3468","id":1085871301,"node_id":"PR_kwDODunzps4wIozO","number":3468,"title":"Add COCO 
dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-12-21T14:07:50Z","updated_at":"2022-01-03T14:22:06Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3468","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3468","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3468.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3468.patch","merged_at":null},"body":"This PR adds the MS COCO dataset. Compared to the [TFDS](https:\/\/github.com\/tensorflow\/datasets\/blob\/master\/tensorflow_datasets\/object_detection\/coco.py) script, this implementation adds 8 additional configs to cover the tasks other than object detection.\r\n\r\nSome notes:\r\n* the data exposed by TFDS is contained in the `2014`, `2015`, `2017` and `2017_panoptic_segmentation` configs here\r\n* I've updated `encode_nested_example` for easier handling of missing values (cc @lhoestq @albertvillanova; will add tests if you are OK with the changes in `features.py`)\r\n* this implementation should fix https:\/\/github.com\/huggingface\/datasets\/pull\/3377#issuecomment-985559427\r\n\r\nTODOs:\r\n- [x] dataset card\r\n- [ ] dummy data\r\n\r\ncc @merveenoyan \r\n\r\nCloses #2526 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3468\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3468\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3467","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3467\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3467\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3467\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3467","id":1085870665,"node_id":"PR_kwDODunzps4wIoqd","number":3467,"title":"Push dataset infos.json to 
Hub","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-21T14:07:13Z","updated_at":"2021-12-21T17:00:10Z","closed_at":"2021-12-21T17:00:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3467","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3467","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3467.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3467.patch","merged_at":"2021-12-21T17:00:09Z"},"body":"When doing `push_to_hub`, the feature types are lost (see issue https:\/\/github.com\/huggingface\/datasets\/issues\/3394).\r\nThis PR fixes this by also pushing a `dataset_infos.json` file to the Hub, that stores the feature types.\r\n\r\nOther minor changes:\r\n- renamed the `___` separator to `--`, since `--` is now disallowed in a name in the back-end.\r\n\r\nI tested this feature with datasets like conll2003 that has feature types like `ClassLabel` that were previously lost.\r\n\r\nClose https:\/\/github.com\/huggingface\/datasets\/issues\/3394\r\n\r\nI would like to include this in today's release (though not mandatory), so feel free to comment\/suggest changes","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3467\/reactions","total_count":2,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3467\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3466","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3466\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3466\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3466\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3466","id":1085722837,"node_id":"PR_kwDODunzps4wII3w","number":3466,"title":"Add CRASS 
dataset","user":{"login":"apergo-ai","id":68908804,"node_id":"MDQ6VXNlcjY4OTA4ODA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68908804?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/apergo-ai","html_url":"https:\/\/github.com\/apergo-ai","followers_url":"https:\/\/api.github.com\/users\/apergo-ai\/followers","following_url":"https:\/\/api.github.com\/users\/apergo-ai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/apergo-ai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/apergo-ai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/apergo-ai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/apergo-ai\/orgs","repos_url":"https:\/\/api.github.com\/users\/apergo-ai\/repos","events_url":"https:\/\/api.github.com\/users\/apergo-ai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/apergo-ai\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-21T11:17:22Z","updated_at":"2022-01-07T08:14:56Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3466","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3466","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3466.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3466.patch","merged_at":null},"body":"Added crass dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3466\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3466\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3465","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3465\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3465\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3465\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3465","id":1085400432,"node_id":"I_kwDODunzps5AseVw","number":3465,"title":"Unable to load 'cnn_dailymail' 
dataset","user":{"login":"talha1503","id":42352729,"node_id":"MDQ6VXNlcjQyMzUyNzI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42352729?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/talha1503","html_url":"https:\/\/github.com\/talha1503","followers_url":"https:\/\/api.github.com\/users\/talha1503\/followers","following_url":"https:\/\/api.github.com\/users\/talha1503\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/talha1503\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/talha1503\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/talha1503\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/talha1503\/orgs","repos_url":"https:\/\/api.github.com\/users\/talha1503\/repos","events_url":"https:\/\/api.github.com\/users\/talha1503\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/talha1503\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":1935892865,"node_id":"MDU6TGFiZWwxOTM1ODkyODY1","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/duplicate","name":"duplicate","color":"cfd3d7","default":true,"description":"This issue or pull request already exists"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-21T03:32:21Z","updated_at":"2022-01-17T13:26:55Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI wanted to load cnn_dailymail dataset from huggingface datasets on Google Colab, but I am getting an error while loading it.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('cnn_dailymail', '3.0.0', ignore_verifications = True)\r\n```\r\n\r\n## Expected results\r\nExpecting to load 'cnn_dailymail' dataset.\r\n\r\n## Actual results\r\n`NotADirectoryError: [Errno 20] Not a directory: '\/root\/.cache\/huggingface\/datasets\/downloads\/1bc05d24fa6dda2468e83a73cf6dc207226e01e3c48a507ea716dc0421da583b\/cnn\/stories'`\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.16.1\r\n- Platform: Linux-5.4.104+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3465\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3465\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3464","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3464\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3464\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3464\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3464","id":1085399097,"node_id":"I_kwDODunzps5AseA5","number":3464,"title":"struct.error: 'i' format requires -2147483648 <= number <= 2147483647","user":{"login":"koukoulala","id":30341159,"node_id":"MDQ6VXNlcjMwMzQxMTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30341159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/koukoulala","html_url":"https:\/\/github.com\/koukoulala","followers_url":"https:\/\/api.github.com\/users\/koukoulala\/followers","following_url":"https:\/\/api.github.com\/users\/koukoulala\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/koukoulala\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/koukoulala\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/koukoulala\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/koukoulala\/orgs","repos_url":"https:\/\/api.github.com\/users\/koukoulala\/repos","events_url":"https:\/\/api.github.com\/users\/koukoulala\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/koukoulala\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-21T03:29:01Z","updated_at":"2022-01-05T15:23:33Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\nusing latest datasets=datasets-1.16.1-py3-none-any.whl\r\nprocess my own multilingual dataset by following codes, and the number of rows in all dataset is 306000, the max_length of each sentence is 256:\r\n![image](https:\/\/user-images.githubusercontent.com\/30341159\/146865779-3d25d011-1f42-4026-9e1b-76f6e1d172e9.png)\r\n\r\nthen I get this error:\r\n![image](https:\/\/user-images.githubusercontent.com\/30341159\/146865844-e60a404c-5f3a-403c-b2f1-acd943b5cdb8.png)\r\n\r\nI have seen the issue in #2134 and #2150, so I don't understand why latest repo still can't deal with big dataset.\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform: linux docker\r\n- Python version: 3.6\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3464\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3464\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3463","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3463\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3463\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3463\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3463","id":1085078795,"node_id":"PR_kwDODunzps4wGB4P","number":3463,"title":"Update swahili_news dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-20T18:20:20Z","updated_at":"2021-12-21T06:24:03Z","closed_at":"2021-12-21T06:24:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3463","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3463","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3463.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3463.patch","merged_at":"2021-12-21T06:24:01Z"},"body":"Update dataset with latest verion data files.\r\n\r\nFix #3462.\r\nClose bigscience-workshop\/data_tooling#107","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3463\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3463\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3462","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3462\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3462\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3462\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3462","id":1085049661,"node_id":"I_kwDODunzps5ArIs9","number":3462,"title":"Update swahili_news 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-12-20T17:44:01Z","
updated_at":"2021-12-21T06:24:02Z","closed_at":"2021-12-21T06:24:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Please note also: the HuggingFace version at https:\/\/huggingface.co\/datasets\/swahili_news is outdated. An updated version, with deduplicated text and official splits, can be found at https:\/\/zenodo.org\/record\/5514203.\r\n\r\n## Adding a Dataset\r\n- **Name:** swahili_news\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nRelated to:\r\n- bigscience-workshop\/data_tooling#107\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3462\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3462\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3461","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3461\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3461\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3461\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3461","id":1085007346,"node_id":"PR_kwDODunzps4wFzDP","number":3461,"title":"Fix links in metrics description","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-20T16:56:19Z","updated_at":"2021-12-20T17:14:52Z","closed_at":"2021-12-20T17:14:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3461","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3461","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3461.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3461.patch","merged_at":"2021-12-20T17:14:51Z"},"body":"Remove Markdown syntax for links in metrics description, as it is not properly rendered.\r\n\r\nRelated to 
#3437.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3461\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3461\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3460","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3460\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3460\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3460\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3460","id":1085002469,"node_id":"PR_kwDODunzps4wFyCf","number":3460,"title":"Don't encode lists as strings when using `Value(\"string\")`","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-20T16:50:49Z","updated_at":"2021-12-20T17:04:51Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3460","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3460","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3460.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3460.patch","merged_at":null},"body":"Following https:\/\/github.com\/huggingface\/datasets\/pull\/3456#event-5792250497 it looks like `datasets` can silently convert lists to strings using `str()`, instead of raising an error.\r\n\r\nThis PR fixes this and should fix the issue with WER showing low values if the input format is not right.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3460\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3460\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3459","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3459\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3459\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3459\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3459","id":1084969672,"node_id":"I_kwDODunzps5Aq1LI","number":3459,"title":"dataset.filter overwriting previously set dataset._indices values, resulting in the wrong elements being selected.","user":{"login":"mmajurski","id":9354454,"node_id":"MDQ6VXNlcjkzNTQ0NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9354454?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mmajurski","html_url":"https:\/\/github.com\/mmajurski","followers_url":"https:\/\/api.github.com\/users\/mmajurski\/followers","following_url":"https:\/\/api.github.com\/users\/mmajurski\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mmajurski\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mmajurski\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mmajurski\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mmajurski\/orgs","repos_url":"https:\/\/api.github.com\/users\/mmajurski\/repos","events_url":"https:\/\/api.github.com\/users\/mmajurski\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mmajurski\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-20T16:16:49Z","updated_at":"2021-12-20T16:34:57Z","closed_at":"2021-12-20T16:34:57Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen using dataset.select to select a subset of a dataset, dataset._indices are set to indicate which elements are now considered in the dataset.\r\nThe same thing happens when you shuffle the dataset; dataset._indices are set to indicate what the new order of the data is.\r\nHowever, if you then use a dataset.filter, that filter interacts with those dataset._indices values in a non-intuitive manner.\r\nhttps:\/\/huggingface.co\/docs\/datasets\/_modules\/datasets\/arrow_dataset.html#Dataset.filter\r\n\r\nEffectively, it looks like the original set of _indices were discared and overwritten by the set created during the filter operation.\r\n\r\nI think this is actually an issue with how the map function handles dataset._indices. 
Ideally it should use the _indices it gets passed, and then return an updated _indices which reflect the map transformation applied to the starting _indices.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndataset = load_dataset('imdb', split='train', keep_in_memory=True)\r\n\r\ndataset = dataset.shuffle(keep_in_memory=True)\r\n\r\ndataset = dataset.select(range(0, 10), keep_in_memory=True)\r\nprint(\"initial 10 elements\")\r\nprint(dataset['label']) # -> [1, 1, 0, 1, 0, 0, 0, 1, 0, 0]\r\n\r\ndataset = dataset.filter(lambda x: x['label'] == 0, keep_in_memory=True)\r\nprint(\"filtered 10 elements looking for label 0\")\r\nprint(dataset['label']) # -> [1, 1, 1, 1, 1, 1]\r\n```\r\n\r\n## Actual results\r\n```\r\n$ python indices_bug.py\r\ninitial 10 elements\r\n[1, 1, 0, 1, 0, 0, 0, 1, 0, 0]\r\nfiltered 10 elements looking for label 0\r\n[1, 1, 1, 1, 1, 1]\r\n```\r\n\r\nThis code block first shuffles the dataset (to get a mix of label 0 and label 1).\r\nThen it selects just the first 10 elements (the number of elements does not matter, 10 is just easy to visualize). The important part is that you select some subset of the dataset. \r\nFinally, a filter is applied to pull out just the elements with `label == 0`.\r\n\r\nThe bug is that you cannot combine any dataset operation which sets the dataset._indices with filter.\r\nIn this case I have 2, shuffle and subset.\r\n\r\nIf you just use a single dataset._indices operation (in this case shuffle) the bug still shows up.\r\n\r\nThe shuffle sets the dataset._indices and then filter uses those indices in the map, then overwrites dataset._indices with the filter results.\r\n```python\r\ndataset = load_dataset('imdb', split='train', keep_in_memory=True)\r\n\r\ndataset = dataset.shuffle(keep_in_memory=True)\r\n\r\ndataset = dataset.filter(lambda x: x['label'] == 0, keep_in_memory=True)\r\n\r\ndataset = dataset.select(range(0, 10), keep_in_memory=True)\r\nprint(dataset['label']) # -> [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]\r\n```\r\n\r\n## Expected results\r\nIn an ideal world, the dataset filter would respect any dataset._indices values which had previously been set.\r\n\r\nIf you use dataset.filter with the base dataset (where dataset._indices has not been set) then the filter command works as expected.\r\n\r\n## Environment info\r\nHere are the commands required to rebuild the conda environment from scratch.\r\n```\r\n# create a virtual environment\r\nconda create -n dataset_indices python=3.8 -y\r\n\r\n# activate the virtual environment\r\nconda activate dataset_indices\r\n\r\n# install huggingface datasets\r\nconda install datasets\r\n```\r\n\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-5.11.0-41-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.12\r\n- PyArrow version: 3.0.0\r\n\r\n\r\n### Full Conda Environment\r\n```\r\n$ conda env export\r\nname: dasaset_indices\r\nchannels:\r\n - defaults\r\ndependencies:\r\n - _libgcc_mutex=0.1=main\r\n - _openmp_mutex=4.5=1_gnu\r\n - abseil-cpp=20210324.2=h2531618_0\r\n - aiohttp=3.8.1=py38h7f8727e_0\r\n - aiosignal=1.2.0=pyhd3eb1b0_0\r\n - arrow-cpp=3.0.0=py38h6b21186_4\r\n - attrs=21.2.0=pyhd3eb1b0_0\r\n - aws-c-common=0.4.57=he6710b0_1\r\n - aws-c-event-stream=0.1.6=h2531618_5\r\n - aws-checksums=0.1.9=he6710b0_0\r\n - aws-sdk-cpp=1.8.185=hce553d0_0\r\n - bcj-cffi=0.5.1=py38h295c915_0\r\n - blas=1.0=mkl\r\n - boost-cpp=1.73.0=h27cfd23_11\r\n - bottleneck=1.3.2=py38heb32a55_1\r\n - brotli=1.0.9=he6710b0_2\r\n - brotli-python=1.0.9=py38heb0550a_2\r\n - brotlicffi=1.0.9.2=py38h295c915_0\r\n - 
brotlipy=0.7.0=py38h27cfd23_1003\r\n - bzip2=1.0.8=h7b6447c_0\r\n - c-ares=1.17.1=h27cfd23_0\r\n - ca-certificates=2021.10.26=h06a4308_2\r\n - certifi=2021.10.8=py38h06a4308_0\r\n - cffi=1.14.6=py38h400218f_0\r\n - conllu=4.4.1=pyhd3eb1b0_0\r\n - cryptography=36.0.0=py38h9ce1e76_0\r\n - dataclasses=0.8=pyh6d0b6a4_7\r\n - dill=0.3.4=pyhd3eb1b0_0\r\n - double-conversion=3.1.5=he6710b0_1\r\n - et_xmlfile=1.1.0=py38h06a4308_0\r\n - filelock=3.4.0=pyhd3eb1b0_0\r\n - frozenlist=1.2.0=py38h7f8727e_0\r\n - gflags=2.2.2=he6710b0_0\r\n - glog=0.5.0=h2531618_0\r\n - gmp=6.2.1=h2531618_2\r\n - grpc-cpp=1.39.0=hae934f6_5\r\n - huggingface_hub=0.0.17=pyhd3eb1b0_0\r\n - icu=58.2=he6710b0_3\r\n - idna=3.3=pyhd3eb1b0_0\r\n - importlib-metadata=4.8.2=py38h06a4308_0\r\n - importlib_metadata=4.8.2=hd3eb1b0_0\r\n - intel-openmp=2021.4.0=h06a4308_3561\r\n - krb5=1.19.2=hac12032_0\r\n - ld_impl_linux-64=2.35.1=h7274673_9\r\n - libboost=1.73.0=h3ff78a5_11\r\n - libcurl=7.80.0=h0b77cf5_0\r\n - libedit=3.1.20210910=h7f8727e_0\r\n - libev=4.33=h7f8727e_1\r\n - libevent=2.1.8=h1ba5d50_1\r\n - libffi=3.3=he6710b0_2\r\n - libgcc-ng=9.3.0=h5101ec6_17\r\n - libgomp=9.3.0=h5101ec6_17\r\n - libnghttp2=1.46.0=hce63b2e_0\r\n - libprotobuf=3.17.2=h4ff587b_1\r\n - libssh2=1.9.0=h1ba5d50_1\r\n - libstdcxx-ng=9.3.0=hd4cf53a_17\r\n - libthrift=0.14.2=hcc01f38_0\r\n - libxml2=2.9.12=h03d6c58_0\r\n - libxslt=1.1.34=hc22bd24_0\r\n - lxml=4.6.3=py38h9120a33_0\r\n - lz4-c=1.9.3=h295c915_1\r\n - mkl=2021.4.0=h06a4308_640\r\n - mkl-service=2.4.0=py38h7f8727e_0\r\n - mkl_fft=1.3.1=py38hd3c417c_0\r\n - mkl_random=1.2.2=py38h51133e4_0\r\n - multiprocess=0.70.12.2=py38h7f8727e_0\r\n - multivolumefile=0.2.3=pyhd3eb1b0_0\r\n - ncurses=6.3=h7f8727e_2\r\n - numexpr=2.7.3=py38h22e1b3c_1\r\n - numpy=1.21.2=py38h20f2e39_0\r\n - numpy-base=1.21.2=py38h79a1101_0\r\n - openpyxl=3.0.9=pyhd3eb1b0_0\r\n - openssl=1.1.1l=h7f8727e_0\r\n - orc=1.6.9=ha97a36c_3\r\n - packaging=21.3=pyhd3eb1b0_0\r\n - pip=21.2.4=py38h06a4308_0\r\n - py7zr=0.16.1=pyhd3eb1b0_1\r\n - pycparser=2.21=pyhd3eb1b0_0\r\n - pycryptodomex=3.10.1=py38h27cfd23_1\r\n - pyopenssl=21.0.0=pyhd3eb1b0_1\r\n - pyparsing=3.0.4=pyhd3eb1b0_0\r\n - pyppmd=0.16.1=py38h295c915_0\r\n - pysocks=1.7.1=py38h06a4308_0\r\n - python=3.8.12=h12debd9_0\r\n - python-dateutil=2.8.2=pyhd3eb1b0_0\r\n - python-xxhash=2.0.2=py38h7f8727e_0\r\n - pyzstd=0.14.4=py38h7f8727e_3\r\n - re2=2020.11.01=h2531618_1\r\n - readline=8.1=h27cfd23_0\r\n - requests=2.26.0=pyhd3eb1b0_0\r\n - setuptools=58.0.4=py38h06a4308_0\r\n - six=1.16.0=pyhd3eb1b0_0\r\n - snappy=1.1.8=he6710b0_0\r\n - sqlite=3.36.0=hc218d9a_0\r\n - texttable=1.6.4=pyhd3eb1b0_0\r\n - tk=8.6.11=h1ccaba5_0\r\n - typing_extensions=3.10.0.2=pyh06a4308_0\r\n - uriparser=0.9.3=he6710b0_1\r\n - utf8proc=2.6.1=h27cfd23_0\r\n - wheel=0.37.0=pyhd3eb1b0_1\r\n - xxhash=0.8.0=h7f8727e_3\r\n - xz=5.2.5=h7b6447c_0\r\n - zipp=3.6.0=pyhd3eb1b0_0\r\n - zlib=1.2.11=h7f8727e_4\r\n - zstd=1.4.9=haebb681_0\r\n - pip:\r\n - async-timeout==4.0.2\r\n - charset-normalizer==2.0.9\r\n - datasets==1.16.1\r\n - fsspec==2021.11.1\r\n - huggingface-hub==0.2.1\r\n - multidict==5.2.0\r\n - pandas==1.3.5\r\n - pyarrow==6.0.1\r\n - pytz==2021.3\r\n - pyyaml==6.0\r\n - tqdm==4.62.3\r\n - typing-extensions==4.0.1\r\n - urllib3==1.26.7\r\n - 
yarl==1.7.2\r\n\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3459\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3459\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3458","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3458\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3458\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3458\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3458","id":1084926025,"node_id":"PR_kwDODunzps4wFiRb","number":3458,"title":"Fix duplicated tag in wikicorpus dataset card","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-20T15:34:16Z","updated_at":"2021-12-20T16:03:25Z","closed_at":"2021-12-20T16:03:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3458","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3458","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3458.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3458.patch","merged_at":"2021-12-20T16:03:24Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3458\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3458\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3457","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3457\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3457\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3457\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3457","id":1084862121,"node_id":"I_kwDODunzps5Aqa6p","number":3457,"title":"Add CMU Graphics Lab Motion Capture 
dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-20T14:34:39Z","updated_at":"2021-12-20T15:22:16Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** CMU Graphics Lab Motion Capture database\r\n- **Description:** The database contains free motions which you can download and use. 
\r\n- **Data:** http:\/\/mocap.cs.cmu.edu\/\r\n- **Motivation:** Nice motion capture dataset\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3457\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3457\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3456","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3456\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3456\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3456\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3456","id":1084687973,"node_id":"PR_kwDODunzps4wEwXz","number":3456,"title":"[WER] Better error message for wer","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-12-20T11:38:40Z","updated_at":"2021-12-20T16:53:37Z","closed_at":"2021-12-20T16:53:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3456","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3456","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3456.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3456.patch","merged_at":null},"body":"Currently we have the following problem when using the WER. When the input format to the WER metric is wrong, instead of throwing an error message a word-error-rate is computed which is incorrect. E.g. 
when doing the following: \r\n\r\n\r\n```python\r\nfrom datasets import load_metric\r\n\r\nwer = load_metric(\"wer\")\r\n\r\ntarget_str = [\"hello this is nice\", \"hello the weather is bloomy\"]\r\npred_str = [[\"hello it's nice\"], [\"hello it's the weather\"]]\r\n\r\nprint(\"Wrong:\", wer.compute(predictions=pred_str, references=target_str))\r\nprint(\"Correct\", wer.compute(predictions=[x[0] for x in pred_str], references=target_str))\r\n```\r\nWe get:\r\n```\r\nWrong: 1.0\r\nCorrect 0.5555555555555556\r\n```\r\n\r\nmeaning that we get a word-error rate for incorrectly passed input formats. We should raise an error here instead so that people don't spend hours fixing a model when it's actually their incorrect evaluation metric that is causing the low WER.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3456\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3456\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3455","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3455\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3455\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3455\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3455","id":1084599650,"node_id":"I_kwDODunzps5Apa1i","number":3455,"title":"Easier information editing","user":{"login":"borgr","id":6416600,"node_id":"MDQ6VXNlcjY0MTY2MDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6416600?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borgr","html_url":"https:\/\/github.com\/borgr","followers_url":"https:\/\/api.github.com\/users\/borgr\/followers","following_url":"https:\/\/api.github.com\/users\/borgr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borgr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borgr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borgr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borgr\/orgs","repos_url":"https:\/\/api.github.com\/users\/borgr\/repos","events_url":"https:\/\/api.github.com\/users\/borgr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borgr\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-20T10:10:43Z","updated_at":"2021-12-20T14:48:59Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nIt requires a lot of effort to improve a datasheet. 
\r\n\r\n**Describe the solution you'd like**\r\nUI or at least a link to the place where the code that needs to be edited is (and an easy way to edit this code directly from the site, without cloning, branching, makefile etc.)\r\n\r\n**Describe alternatives you've considered**\r\nThe current UX is to go through the 8 contribution steps even when one just wishes to change a line, fix a typo, etc.\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3455\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3455\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3454","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3454\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3454\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3454\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3454","id":1084519107,"node_id":"PR_kwDODunzps4wENam","number":3454,"title":"Fix iter_archive generator","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-20T08:50:15Z","updated_at":"2021-12-20T10:05:00Z","closed_at":"2021-12-20T10:04:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3454","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3454","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3454.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3454.patch","merged_at":"2021-12-20T10:04:59Z"},"body":"This PR:\r\n- Adds tests to DownloadManager and StreamingDownloadManager `iter_archive` for both path and file inputs\r\n- Fixes bugs in `iter_archive` introduced in:\r\n - #3443\r\n\r\n\r\nFix #3453.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3454\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3454\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3453","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3453\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3453\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3453\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3453","id":1084515911,"node_id":"I_kwDODunzps5ApGZH","number":3453,"title":"ValueError while iter_archive","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-12-20T08:46:18Z","updated_at":"2021-12-20T10:04:59Z","closed_at":"2021-12-20T10:04:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAfter the merge of:\r\n- #3443\r\n\r\nthe method `iter_archive` throws a ValueError:\r\n```\r\nValueError: read of closed file\r\n```\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfor path, file in dl_manager.iter_archive(archive_path):\r\n pass\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3453\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3453\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3452","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3452\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3452\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3452\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3452","id":1083803178,"node_id":"I_kwDODunzps5AmYYq","number":3452,"title":"why the stratify option is omitted from test_train_split function?","user":{"login":"j-sieger","id":9985334,"node_id":"MDQ6VXNlcjk5ODUzMzQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9985334?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/j-sieger","html_url":"https:\/\/github.com\/j-sieger","followers_url":"https:\/\/api.github.com\/users\/j-sieger\/followers","following_url":"https:\/\/api.github.com\/users\/j-sieger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/j-sieger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/j-sieger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/j-sieger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/j-sieger\/orgs","repos_url":"https:\/\/api.github.com\/users\/j-sieger\/repos","events_url":"https:\/\/api.github.com\/users\/j-sieger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/j-sieger\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3761482852,"node_id":"LA_kwDODunzps7gM6xk","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20second%20issue","name":"good second issue","color":"BDE59C","default":false,"description":"Issues a bit more difficult than \"Good First\" issues"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-18T10:37:47Z","updated_at":"2022-01-24T15:03:45Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"why the stratify option is omitted from test_train_split function?\r\n\r\nis there any other way implement the stratify option while splitting the dataset? 
as it is important point to be considered while splitting the dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3452\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3452\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3451","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3451\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3451\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3451\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3451","id":1083459137,"node_id":"PR_kwDODunzps4wA5LP","number":3451,"title":"[Staging] Update dataset repos automatically on the Hub","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-17T17:12:11Z","updated_at":"2021-12-21T10:25:46Z","closed_at":"2021-12-20T14:09:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3451","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3451","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3451.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3451.patch","merged_at":"2021-12-20T14:09:51Z"},"body":"Let's have a script that updates the dataset repositories on staging for now. This way we can make sure it works fine before going in prod.\r\n\r\nRelated to https:\/\/github.com\/huggingface\/datasets\/issues\/3341\r\n\r\nThe script runs on each commit on `master`. 
It checks the datasets that were changed, and it pushes the changes to the corresponding repositories on the Hub.\r\n\r\nIf there's a new dataset, then a new repository is created.\r\n\r\nIf the commit is a new release of `datasets`, it also pushes the tag to all the repositories.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3451\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3451\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3450","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3450\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3450\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3450\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3450","id":1083450158,"node_id":"I_kwDODunzps5AlCMu","number":3450,"title":"Unexpected behavior doing Split + Filter","user":{"login":"jbrachat","id":26432605,"node_id":"MDQ6VXNlcjI2NDMyNjA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26432605?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jbrachat","html_url":"https:\/\/github.com\/jbrachat","followers_url":"https:\/\/api.github.com\/users\/jbrachat\/followers","following_url":"https:\/\/api.github.com\/users\/jbrachat\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jbrachat\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jbrachat\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jbrachat\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jbrachat\/orgs","repos_url":"https:\/\/api.github.com\/users\/jbrachat\/repos","events_url":"https:\/\/api.github.com\/users\/jbrachat\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jbrachat\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-17T17:00:39Z","updated_at":"2021-12-20T14:51:37Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI observed unexpected behavior when applying 'train_test_split' followed by 'filter' on dataset. 
Elements of the training dataset eventually end up in the test dataset (after applying the 'filter') \r\n\r\n## Steps to reproduce the bug\r\n```\r\nfrom datasets import Dataset\r\nimport pandas as pd\r\ndic = {'x': [1,2,3,4,5,6,7,8,9], 'y':['q','w','e','r','t','y','u','i','o']}\r\ndf = pd.DataFrame.from_dict(dic)\r\ndataset = Dataset.from_pandas(df)\r\nsplit_dataset = dataset.train_test_split(test_size=0.5, shuffle=False, seed=42)\r\ntrain_dataset = split_dataset[\"train\"]\r\neval_dataset = split_dataset[\"test\"]\r\neval_dataset_2 = eval_dataset.filter(lambda example: example['x'] % 2 == 0)\r\nprint( eval_dataset['x'])\r\nprint(eval_dataset_2['x'])\r\n```\r\nOne observes that elements in eval_dataset2 are actually coming from the training dataset...\r\n\r\n## Expected results\r\nThe expected results would be that the filtered eval dataset would only contain elements from the original eval dataset.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Windows 10\r\n- Python version: 3.7\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3450\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3450\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3449","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3449\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3449\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3449\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3449","id":1083373018,"node_id":"I_kwDODunzps5AkvXa","number":3449,"title":"Add `__add__()`, `__iadd__()` and similar to `Dataset` class","user":{"login":"sgraaf","id":8904453,"node_id":"MDQ6VXNlcjg5MDQ0NTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8904453?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgraaf","html_url":"https:\/\/github.com\/sgraaf","followers_url":"https:\/\/api.github.com\/users\/sgraaf\/followers","following_url":"https:\/\/api.github.com\/users\/sgraaf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgraaf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgraaf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgraaf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgraaf\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgraaf\/repos","events_url":"https:\/\/api.github.com\/users\/sgraaf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgraaf\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the 
library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-17T15:29:11Z","updated_at":"2021-12-24T11:06:10Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nNo.\r\n\r\n**Describe the solution you'd like**\r\nI would like to be able to concatenate datasets as follows:\r\n```python\r\n>>> dataset[\"train\"] += dataset[\"validation\"]\r\n```\r\n\r\n... instead of using `concatenate_datasets()`:\r\n```python\r\n>>> raw_datasets[\"train\"] = concatenate_datasets([raw_datasets[\"train\"], raw_datasets[\"validation\"]])\r\n>>> del raw_datasets[\"validation\"]\r\n```\r\n\r\n**Describe alternatives you've considered**\r\nWell, I have considered `concatenate_datasets()` \ud83d\ude00\r\n\r\n**Additional context**\r\nN.a.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3449\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3449\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3448","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3448\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3448\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3448\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3448","id":1083231080,"node_id":"I_kwDODunzps5AkMto","number":3448,"title":"JSONDecodeError with HuggingFace dataset viewer","user":{"login":"kathrynchapman","id":57716109,"node_id":"MDQ6VXNlcjU3NzE2MTA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57716109?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kathrynchapman","html_url":"https:\/\/github.com\/kathrynchapman","followers_url":"https:\/\/api.github.com\/users\/kathrynchapman\/followers","following_url":"https:\/\/api.github.com\/users\/kathrynchapman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kathrynchapman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kathrynchapman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kathrynchapman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kathrynchapman\/orgs","repos_url":"https:\/\/api.github.com\/users\/kathrynchapman\/repos","events_url":"https:\/\/api.github.com\/users\/kathrynchapman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kathrynchapman\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-12-17T12:52:41Z","updated_at":"2021-12-20T14:00:52Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for 'pubmed_neg'\r\n\r\n**Link:** 
https:\/\/huggingface.co\/datasets\/IGESML\/pubmed_neg\r\n\r\nI am getting the error: \r\nStatus code: 400\r\nException: JSONDecodeError\r\nMessage: Expecting property name enclosed in double quotes: line 61 column 2 (char 1202)\r\n\r\nI have checked all files - I am not using single quotes anywhere. Not sure what is causing this issue.\r\n\r\nAm I the one who added this dataset ? Yes\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3448\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3448\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3447","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3447\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3447\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3447\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3447","id":1082539790,"node_id":"I_kwDODunzps5Ahj8O","number":3447,"title":"HF_DATASETS_OFFLINE=1 didn't stop datasets.builder from downloading ","user":{"login":"dunalduck0","id":51274745,"node_id":"MDQ6VXNlcjUxMjc0NzQ1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/51274745?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dunalduck0","html_url":"https:\/\/github.com\/dunalduck0","followers_url":"https:\/\/api.github.com\/users\/dunalduck0\/followers","following_url":"https:\/\/api.github.com\/users\/dunalduck0\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dunalduck0\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dunalduck0\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dunalduck0\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dunalduck0\/orgs","repos_url":"https:\/\/api.github.com\/users\/dunalduck0\/repos","events_url":"https:\/\/api.github.com\/users\/dunalduck0\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dunalduck0\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-12-16T18:51:13Z","updated_at":"2021-12-20T11:43:29Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAccording to https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#loading-a-dataset-builder, setting HF_DATASETS_OFFLINE to 1 should make datasets to \"run in full offline mode\". It didn't work for me. At the very beginning, datasets still tried to download \"custom data configuration\" for JSON, despite I have run the program once and cached all data into the same --cache_dir. \r\n\r\n\"Downloading\" is not an issue when running with local disk, but crashes often with cloud storage because (1) multiply GPU processes try to access the same file, AND (2) FileLocker fails to synchronize all processes, due to storage throttling. 
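A hedged sketch of one way to pin the prepared data to a fixed path instead of the hashed cache directory discussed in this report; `datacache/trainpy.v2.prepared` is a hypothetical location, and this is a workaround idea, not the library's prescribed fix:

```python
# Prepare the JSON data once, save it to a fixed path with save_to_disk(), and
# always reload it with load_from_disk(), which never re-downloads or re-prepares.
import os

os.environ["HF_DATASETS_OFFLINE"] = "1"  # exporting it in the shell, as in the repro below, also works

from datasets import load_dataset, load_from_disk

prepared_path = "datacache/trainpy.v2.prepared"  # hypothetical fixed location

if not os.path.isdir(prepared_path):
    raw = load_dataset(
        "json",
        data_files={"train": "trainpy.v2.train.json", "validation": "trainpy.v2.eval.json"},
        cache_dir="datacache/trainpy.v2",
    )
    raw.save_to_disk(prepared_path)

raw = load_from_disk(prepared_path)  # fixed path; no hashed "default-..." directory involved
```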
99% of times, when the main process releases FileLocker, the file is not actually ready for access in cloud storage and thus triggers \"FileNotFound\" errors for all other processes. Well, another way to resolve the problem is to investigate super reliable cloud storage, but that's out of scope here.\r\n\r\n## Steps to reproduce the bug\r\n```\r\nexport HF_DATASETS_OFFLINE=1\r\npython run_clm.py --model_name_or_path=models\/gpt-j-6B --train_file=trainpy.v2.train.json --validation_file=trainpy.v2.eval.json --cache_dir=datacache\/trainpy.v2\r\n```\r\n\r\n## Expected results\r\ndatasets should stop all \"downloading\" behavior but reuse the cached JSON configuration. I think the problem here is part of the cache directory path, \"default-471372bed4b51b53\", is randomly generated, and it could change if some parameters changed. And I didn't find a way to use a fixed path to ensure datasets to reuse cached data every time.\r\n\r\n## Actual results\r\nThe logging shows datasets are still downloading into \"datacache\/trainpy.v2\/json\/default-471372bed4b51b53\/0.0.0\/c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426\". \r\n```\r\n12\/16\/2021 10:25:59 - WARNING - datasets.builder - Using custom data configuration default-471372bed4b51b53\r\n12\/16\/2021 10:25:59 - INFO - datasets.builder - Generating dataset json (datacache\/trainpy.v2\/json\/default-471372bed4b51b53\/0.0.0\/c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426)\r\nDownloading and preparing dataset json\/default to datacache\/trainpy.v2\/json\/default-471372bed4b51b53\/0.0.0\/c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426...\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2\/2 [00:00<00:00, 17623.13it\/s]\r\n12\/16\/2021 10:25:59 - INFO - datasets.utils.download_manager - Downloading took 0.0 min\r\n12\/16\/2021 10:26:00 - INFO - datasets.utils.download_manager - Checksum Computation took 0.0 min\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2\/2 [00:00<00:00, 1206.99it\/s]\r\n12\/16\/2021 10:26:00 - INFO - datasets.utils.info_utils - Unable to verify checksums.\r\n12\/16\/2021 10:26:00 - INFO - datasets.builder - Generating split train\r\n12\/16\/2021 10:26:01 - INFO - datasets.builder - Generating split validation\r\n12\/16\/2021 10:26:02 - INFO - datasets.utils.info_utils - Unable to verify splits sizes.\r\nDataset json downloaded and prepared to datacache\/trainpy.v2\/json\/default-471372bed4b51b53\/0.0.0\/c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426. 
Subsequent calls will reuse this data.\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2\/2 [00:00<00:00, 53.54it\/s]\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.16.1\r\n- Platform: Linux\r\n- Python version: 3.8.10\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3447\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3447\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3446","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3446\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3446\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3446\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3446","id":1082414229,"node_id":"PR_kwDODunzps4v9dFM","number":3446,"title":"Remove redundant local path information in audio\/image datasets","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-16T16:35:15Z","updated_at":"2021-12-16T17:30:04Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3446","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3446","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3446.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3446.patch","merged_at":null},"body":"Remove the redundant path information in the audio\/image dataset as discussed in https:\/\/github.com\/huggingface\/datasets\/pull\/3430#issuecomment-994734828\r\n\r\nTODOs:\r\n* [ ] merge 
https:\/\/github.com\/huggingface\/datasets\/pull\/3430\r\n* [ ] merge https:\/\/github.com\/huggingface\/datasets\/pull\/3364\r\n* [ ] re-generate the info files of the updated audio datasets\r\n\r\ncc: @patrickvonplaten @anton-l @nateraw (I expect this to break the audio\/vision examples in Transformers; after this change you'll be able to access underlying paths as follows `dset = dset.cast_column(\"audio\", Audio(..., decode=False)); path = dset[0][\"audio\"]`)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3446\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3446\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3445","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3445\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3445\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3445\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3445","id":1082370968,"node_id":"I_kwDODunzps5Ag6uY","number":3445,"title":"question","user":{"login":"BAKAYOKO0232","id":38075175,"node_id":"MDQ6VXNlcjM4MDc1MTc1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38075175?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BAKAYOKO0232","html_url":"https:\/\/github.com\/BAKAYOKO0232","followers_url":"https:\/\/api.github.com\/users\/BAKAYOKO0232\/followers","following_url":"https:\/\/api.github.com\/users\/BAKAYOKO0232\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BAKAYOKO0232\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BAKAYOKO0232\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BAKAYOKO0232\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BAKAYOKO0232\/orgs","repos_url":"https:\/\/api.github.com\/users\/BAKAYOKO0232\/repos","events_url":"https:\/\/api.github.com\/users\/BAKAYOKO0232\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BAKAYOKO0232\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-16T15:57:00Z","updated_at":"2022-01-03T10:09:00Z","closed_at":"2022-01-03T10:09:00Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*name of the dataset*'\r\n\r\n**Link:** *link to the dataset viewer page*\r\n\r\n*short description of the issue*\r\n\r\nAm I the one who added this dataset ? 
Yes-No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3445\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3445\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3444","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3444\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3444\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3444\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3444","id":1082078961,"node_id":"I_kwDODunzps5Afzbx","number":3444,"title":"Align the Dataset and IterableDataset processing API","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-12-16T11:26:11Z","updated_at":"2021-12-21T15:15:40Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Intro\r\n\r\nCurrently the two classes have two distinct API for processing:\r\n\r\n### The `.map()` method\r\n\r\nBoth have those parameters in common: function, batched, batch_size\r\n\r\n- IterableDataset is missing those parameters:\r\nwith_indices, with_rank, input_columns, drop_last_batch, remove_columns, features, disable_nullable, fn_kwargs, num_proc\r\n\r\n- Dataset also has additional parameters that are exclusive, due to caching:\r\nkeep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, suffix_template, new_fingerprint\r\n\r\n- There is also an important difference in terms of behavior:\r\n**Dataset.map adds new columns** (with dict.update)\r\nBUT\r\n**IterableDataset discards previous columns** (it overwrites the dict)\r\nIMO the two methods should have the same behavior. 
This would be an important breaking change though.\r\n\r\n- Dataset.map is eager while IterableDataset.map is lazy\r\n\r\n### The `.shuffle()` method\r\n\r\n- Both have an optional seed parameter, but IterableDataset requires a mandatory parameter buffer_size to control the size of the local buffer used for approximate shuffling.\r\n\r\n- IterableDataset is missing the parameter generator\r\n\r\n- Also Dataset has exclusive parameters due to caching: keep_in_memory, load_from_cache_file, indices_cache_file_name, writer_batch_size, new_fingerprint\r\n\r\n### The `.with_format()` method\r\n\r\n- IterableDataset only supports \"torch\" (it misses tf, jax, pandas, arrow) and is missing the parameters: columns, output_all_columns and format_kwargs\r\n\r\n### Other methods\r\n\r\n- Both have the same `remove_columns` method\r\n- IterableDataset is missing: cast, cast_column, filter, rename_column, rename_columns, class_encode_column, flatten, prepare_for_task, train_test_split, shard\r\n- Some other methods are missing but we can discuss them: set_transform, formatted_as, with_transform\r\n- And others don't really make sense for an iterable dataset: select, sort, add_column, add_item\r\n- Dataset is missing skip and take, that IterableDataset implements.\r\n\r\n## Questions\r\n\r\nI think it would be nice to be able to switch between streaming and regular dataset easily, without changing the processing code significantly.\r\n\r\n1. What should be aligned and what shouldn't between those two APIs ?\r\n\r\nIMO the minimum is to align the main processing methods.\r\n\r\nIt would mean aligning breaking the current `Iterable.map` to have the same behavior as `Dataset.map` (add columns with dict.update), and add multiprocessing as well as the missing parameters.\r\n\r\nIt would also mean implementing the missing methods: cast, cast_column, filter, rename_column, rename_columns, class_encode_column, flatten, prepare_for_task, train_test_split, shard\r\n\r\n2. What are the breaking changes for IterableDataset ?\r\n\r\nThe main breaking change would be the change of behavior of `IterableDataset.map`, because currently it discards all the previous columns instead of keeping them.\r\n\r\n3. Shall we also do some changes for regular datasets ?\r\n\r\nI agree the simplest would be to have the exact same methods for both Dataset and IterableDataset. However this is probably not a good idea because it would prevent users from using the best benefits of them. That's why we can keep some aspects of regular datasets as they are:\r\n- keep the eager Dataset.map with caching\r\n- keep the with_transform method for lazy processing\r\n- keep Dataset.select (it could also be added to IterableDataset even though it's not recommended)\r\n\r\nWe could have a completely aligned `map` method if both methods were lazy by default, but this is a very big breaking change so I'm not sure we can consider doing that.\r\n\r\nFor information, TFDS does lazy map by default, and has an additional `.cache()` method.\r\n\r\n## Opinions ?\r\n\r\nI'd love to gather some opinions about this here. 
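A minimal illustration of the `map()` divergence being discussed (behavior as described in this issue; later releases may align the two):

```python
# Dataset.map() merges the returned dict into the existing columns (dict.update),
# whereas the issue notes that IterableDataset.map() at the time discarded the
# previous columns instead of keeping them.
from datasets import Dataset

ds = Dataset.from_dict({"text": ["a", "bb"], "label": [0, 1]})

mapped = ds.map(lambda example: {"length": len(example["text"])})
print(mapped.column_names)  # ['text', 'label', 'length'] -> new column is added
print(mapped[0])            # {'text': 'a', 'label': 0, 'length': 1}
```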
If the two APIs are more aligned it would be awesome for the examples in `transformers`, and it would create a satisfactory experience for users that want to switch from one mode to the other.\r\n\r\ncc @mariosasko @albertvillanova @thomwolf @patrickvonplaten @sgugger ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3444\/reactions","total_count":4,"+1":4,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3444\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3443","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3443\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3443\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3443\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3443","id":1082052833,"node_id":"PR_kwDODunzps4v8QDX","number":3443,"title":"Extend iter_archive to support file object input","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-16T10:59:14Z","updated_at":"2021-12-17T17:53:03Z","closed_at":"2021-12-17T17:53:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3443","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3443","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3443.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3443.patch","merged_at":"2021-12-17T17:53:02Z"},"body":"This PR adds support to passing a file object to `[Streaming]DownloadManager.iter_archive`.\r\n\r\nWith this feature, we can iterate over a tar file inside another tar file.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3443\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3443\/timeline","performed_via_github_app":null} 
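A hedged usage sketch of the capability this PR describes; `iter_nested_tar` and its arguments are a hypothetical helper, not code from the PR itself:

```python
# With this change, the file object yielded for a nested .tar can be passed
# straight back to iter_archive, so a tar-inside-a-tar is streamed without
# extracting it to disk.
def iter_nested_tar(dl_manager, outer_archive_path):
    for inner_path, inner_file in dl_manager.iter_archive(outer_archive_path):
        if inner_path.endswith(".tar"):
            # inner_file is a file-like object; before this PR, iter_archive
            # only accepted paths.
            for member_path, member_file in dl_manager.iter_archive(inner_file):
                yield f"{inner_path}::{member_path}", member_file
        else:
            yield inner_path, inner_file
```

In a loading script, the outer archive path would typically come from `dl_manager.download` in `_split_generators`, with the yielded pairs consumed in `_generate_examples`.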
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3442","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3442\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3442\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3442\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3442","id":1081862747,"node_id":"PR_kwDODunzps4v7oBZ","number":3442,"title":"Extend text to support yielding lines, paragraphs or documents","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-12-16T07:33:17Z","updated_at":"2021-12-20T16:59:10Z","closed_at":"2021-12-20T16:39:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3442","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3442","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3442.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3442.patch","merged_at":"2021-12-20T16:39:18Z"},"body":"Add `config.row` option to `text` module to allow yielding lines (default, current case), paragraphs or documents.\r\n\r\nFeel free to comment on the name of the config parameter `row`:\r\n- Currently, the docs state datasets are made of rows and columns\r\n- Other names I considered: `example`, `item`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3442\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3442\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3441","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3441\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3441\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3441\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3441","id":1081571784,"node_id":"I_kwDODunzps5Ad3nI","number":3441,"title":"Add 
QuALITY dataset","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-15T22:26:19Z","updated_at":"2021-12-28T15:17:05Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** QuALITY\r\n- **Description:** A challenging question answering with very long contexts (Twitter [thread](https:\/\/twitter.com\/sleepinyourhat\/status\/1471225421794529281?s=20))\r\n- **Paper:** No ArXiv link yet, but draft is [here](https:\/\/github.com\/nyu-mll\/quality\/blob\/main\/quality_preprint.pdf)\r\n- **Data:** GitHub repo [here](https:\/\/github.com\/nyu-mll\/quality)\r\n- **Motivation:** This dataset would serve as a nice way to benchmark long-range Transformer models like BigBird, Longformer and their descendants. 
In particular, it would be very interesting to see how the S4 model fares on this given it's impressive performance on the Long Range Arena\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3441\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3441\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3440","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3440\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3440\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3440\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3440","id":1081528426,"node_id":"I_kwDODunzps5AdtBq","number":3440,"title":"datasets keeps reading from cached files, although I disabled it","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-15T21:26:22Z","updated_at":"2021-12-16T15:32:27Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nHi,\r\nI am trying to avoid dataset library using cached files, I get the following bug when this tried to read the cached files. 
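A hedged recap of the caching controls available in `datasets` around this version, complementing the attempts listed just below; these are possibilities, not a confirmed fix for the keys-mismatch error, and the `data_files` value is hypothetical:

```python
# Caching knobs: a global switch, a per-load redownload flag, on-disk cleanup,
# and a per-call opt-out on map/filter.
from datasets import load_dataset, set_caching_enabled

set_caching_enabled(False)  # stop reusing cached map/filter results

dataset = load_dataset(
    "json",
    data_files={"train": "train.json"},  # hypothetical file
    download_mode="force_redownload",    # ignore previously prepared data
)["train"]

dataset.cleanup_cache_files()  # remove stale cache files from disk
filtered = dataset.filter(
    lambda example: int(example["labels"]) == 1,
    load_from_cache_file=False,  # per-call opt-out, independent of the global flag
)
```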
I tried to do the followings:\r\n```\r\nfrom datasets import set_caching_enabled\r\nset_caching_enabled(False)\r\n```\r\nalso force redownlaod:\r\n```\r\n download_mode='force_redownload'\r\n```\r\nbut none worked so far, this is on a cluster and on some of the machines this reads from the cached files, I really appreciate any idea on how to fully remove caching @lhoestq \r\nmany thanks\r\n\r\n```\r\nFile \"run_clm.py\", line 496, in \r\n main()\r\n File \"run_clm.py\", line 419, in main\r\n train_result = trainer.train(resume_from_checkpoint=checkpoint)\r\n File \"\/users\/dara\/codes\/fewshot\/debug\/fewshot\/third_party\/trainers\/trainer.py\", line 943, in train\r\n self._maybe_log_save_evaluate(tr_loss, model, trial, epoch, ignore_keys_for_eval)\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/transformers\/trainer.py\", line 1445, in _maybe_log_save_evaluate\r\n metrics = self.evaluate(ignore_keys=ignore_keys_for_eval)\r\n File \"\/users\/dara\/codes\/fewshot\/debug\/fewshot\/third_party\/trainers\/trainer.py\", line 172, in evaluate\r\n output = self.eval_loop(\r\n File \"\/users\/dara\/codes\/fewshot\/debug\/fewshot\/third_party\/trainers\/trainer.py\", line 241, in eval_loop\r\n metrics = self.compute_pet_metrics(eval_datasets, model, self.extra_info[metric_key_prefix], task=task)\r\n File \"\/users\/dara\/codes\/fewshot\/debug\/fewshot\/third_party\/trainers\/trainer.py\", line 268, in compute_pet_metrics\r\n centroids = self._compute_per_token_train_centroids(model, task=task)\r\n File \"\/users\/dara\/codes\/fewshot\/debug\/fewshot\/third_party\/trainers\/trainer.py\", line 353, in _compute_per_token_train_centroids\r\n data = get_label_samples(self.get_per_task_train_dataset(task), label)\r\n File \"\/users\/dara\/codes\/fewshot\/debug\/fewshot\/third_party\/trainers\/trainer.py\", line 350, in get_label_samples\r\n return dataset.filter(lambda example: int(example['labels']) == label)\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 470, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py\", line 406, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 2519, in filter\r\n indices = self.map(\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 2036, in map\r\n return self._map_single(\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 503, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 470, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py\", line 406, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 2248, in _map_single\r\n return Dataset.from_file(cache_file_name, info=info, split=self.split)\r\n File 
\"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 654, in from_file\r\n return cls(\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 593, in __init__\r\n self.info.features = self.info.features.reorder_fields_as(inferred_features)\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1092, in reorder_fields_as\r\n return Features(recursive_reorder(self, other))\r\n File \"\/users\/dara\/conda\/envs\/multisuccess\/lib\/python3.8\/site-packages\/datasets\/features\/features.py\", line 1081, in recursive_reorder\r\n raise ValueError(f\"Keys mismatch: between {source} and {target}\" + stack_position)\r\nValueError: Keys mismatch: between {'indices': Value(dtype='uint64', id=None)} and {'candidates_ids': Sequence(feature=Value(dtype='null', id=None), length=-1, id=None), 'labels': Value(dtype='int64', id=None), 'attention_mask': Sequence(feature=Value(dtype='int8', id=None), length=-1, id=None), 'input_ids': Sequence(feature=Value(dtype='int32', id=None), length=-1, id=None), 'extra_fields': {}, 'task': Value(dtype='string', id=None)}\r\n\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform: linux \r\n- Python version: 3.8.12 \r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3440\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3440\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3439","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3439\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3439\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3439\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3439","id":1081389723,"node_id":"PR_kwDODunzps4v6Hxs","number":3439,"title":"Add `cast_column` to 
`IterableDataset`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-15T19:00:45Z","updated_at":"2021-12-16T15:55:20Z","closed_at":"2021-12-16T15:55:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3439","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3439","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3439.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3439.patch","merged_at":"2021-12-16T15:55:19Z"},"body":"Closes #3369. \r\n\r\ncc: @patrickvonplaten ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3439\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3439\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3438","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3438\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3438\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3438\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3438","id":1081302203,"node_id":"PR_kwDODunzps4v52Va","number":3438,"title":"Update supported versions of Python in 
setup.py","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-15T17:30:12Z","updated_at":"2021-12-20T14:22:13Z","closed_at":"2021-12-20T14:22:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3438","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3438","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3438.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3438.patch","merged_at":"2021-12-20T14:22:12Z"},"body":"Update the list of supported versions of Python in `setup.py` to keep the PyPI project description updated.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3438\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3438\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3437","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3437\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3437\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3437\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3437","id":1081247889,"node_id":"PR_kwDODunzps4v5qzI","number":3437,"title":"Update BLEURT 
hyperlink","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-15T16:34:47Z","updated_at":"2021-12-17T13:28:26Z","closed_at":"2021-12-17T13:28:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3437","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3437","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3437.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3437.patch","merged_at":"2021-12-17T13:28:25Z"},"body":"The description of BLEURT on the hf.co website has a strange use of URL hyperlinking. This PR attempts to fix this, although I am not 100% sure Markdown syntax is allowed on the frontend or not.\r\n\r\n![Screen Shot 2021-12-15 at 17 31 27](https:\/\/user-images.githubusercontent.com\/26859204\/146226432-c83cbdaf-f57d-4999-b53c-85da718ff7fb.png)\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3437\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3437\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3436","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3436\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3436\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3436\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3436","id":1081068139,"node_id":"PR_kwDODunzps4v5FE3","number":3436,"title":"Add the OneStopQa 
dataset","user":{"login":"scaperex","id":28459495,"node_id":"MDQ6VXNlcjI4NDU5NDk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28459495?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/scaperex","html_url":"https:\/\/github.com\/scaperex","followers_url":"https:\/\/api.github.com\/users\/scaperex\/followers","following_url":"https:\/\/api.github.com\/users\/scaperex\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/scaperex\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/scaperex\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/scaperex\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/scaperex\/orgs","repos_url":"https:\/\/api.github.com\/users\/scaperex\/repos","events_url":"https:\/\/api.github.com\/users\/scaperex\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/scaperex\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-15T13:53:31Z","updated_at":"2021-12-17T14:32:00Z","closed_at":"2021-12-17T13:25:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3436","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3436","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3436.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3436.patch","merged_at":"2021-12-17T13:25:29Z"},"body":"Adding OneStopQA, a multiple choice reading comprehension dataset annotated according to the STARC (Structured Annotations for Reading Comprehension) scheme.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3436\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3436\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3435","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3435\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3435\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3435\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3435","id":1081043756,"node_id":"PR_kwDODunzps4v4_-0","number":3435,"title":"Improve Wikipedia Loading 
Script","user":{"login":"geohci","id":45494522,"node_id":"MDQ6VXNlcjQ1NDk0NTIy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/45494522?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/geohci","html_url":"https:\/\/github.com\/geohci","followers_url":"https:\/\/api.github.com\/users\/geohci\/followers","following_url":"https:\/\/api.github.com\/users\/geohci\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/geohci\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/geohci\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/geohci\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/geohci\/orgs","repos_url":"https:\/\/api.github.com\/users\/geohci\/repos","events_url":"https:\/\/api.github.com\/users\/geohci\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/geohci\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-12-15T13:30:06Z","updated_at":"2022-02-10T16:50:32Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3435","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3435","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3435.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3435.patch","merged_at":null},"body":"* More structured approach to detecting redirects\r\n* Remove redundant template filter code (covered by strip_code)\r\n* Add language-specific lists of additional media namespace aliases for filtering\r\n* Add language-specific lists of category namespace aliases for new link text cleaning step\r\n* Remove magic words (parser directions like __TOC__ that occasionally occur in text)\r\n\r\nFix #3400\r\n\r\nWith support from @albertvillanova \r\nCC @yjernite","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3435\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3435\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3434","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3434\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3434\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3434\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3434","id":1080917446,"node_id":"I_kwDODunzps5AbX3G","number":3434,"title":"Add The People's 
Speech","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-15T11:21:21Z","updated_at":"2021-12-15T11:21:21Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** The People's Speech\r\n- **Description:** a massive English-language dataset of audio transcriptions of full sentences. 
\r\n- **Paper:** https:\/\/openreview.net\/pdf?id=R8CwidgJ0yT\r\n- **Data:** https:\/\/mlcommons.org\/en\/peoples-speech\/\r\n- **Motivation:** With over 30,000 hours of speech, this dataset is the largest and most diverse freely available English speech recognition corpus today.\r\n\r\n\r\n[The article](https:\/\/thegradient.pub\/new-datasets-to-democratize-speech-recognition-technology-2\/) which may be useful when working on the dataset.\r\n\r\ncc: @anton-l \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3434\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":3,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3434\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3433","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3433\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3433\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3433\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3433","id":1080910724,"node_id":"I_kwDODunzps5AbWOE","number":3433,"title":"Add Multilingual Spoken Words dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-15T11:14:44Z","updated_at":"2021-12-15T11:14:44Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Multilingual Spoken Words\r\n- **Description:** Multilingual Spoken Words Corpus is a large and growing audio dataset of spoken words in 50 languages for academic 
research and commercial applications in keyword spotting and spoken term search, licensed under CC-BY 4.0. The dataset contains more than 340,000 keywords, totaling 23.4 million 1-second spoken examples (over 6,000 hours). \r\n\r\n Read more: https:\/\/mlcommons.org\/en\/news\/spoken-words-blog\/\r\n- **Paper:** https:\/\/datasets-benchmarks-proceedings.neurips.cc\/paper\/2021\/file\/fe131d7f5a6b38b23cc967316c13dae2-Paper-round2.pdf\r\n- **Data:** https:\/\/mlcommons.org\/en\/multilingual-spoken-words\/\r\n- **Motivation:** \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3433\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3433\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3432","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3432\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3432\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3432\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3432","id":1079910769,"node_id":"PR_kwDODunzps4v1NGS","number":3432,"title":"Correctly indent builder config in dataset script docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-14T15:39:47Z","updated_at":"2021-12-14T17:35:17Z","closed_at":"2021-12-14T17:35:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3432","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3432","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3432.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3432.patch","merged_at":"2021-12-14T17:35:17Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3432\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/iss
ues\/3432\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3431","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3431\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3431\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3431\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3431","id":1079866083,"node_id":"I_kwDODunzps5AXXLj","number":3431,"title":"Unable to resolve any data file after loading once","user":{"login":"Fischer-love-fish","id":84694183,"node_id":"MDQ6VXNlcjg0Njk0MTgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/84694183?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Fischer-love-fish","html_url":"https:\/\/github.com\/Fischer-love-fish","followers_url":"https:\/\/api.github.com\/users\/Fischer-love-fish\/followers","following_url":"https:\/\/api.github.com\/users\/Fischer-love-fish\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Fischer-love-fish\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Fischer-love-fish\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Fischer-love-fish\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Fischer-love-fish\/orgs","repos_url":"https:\/\/api.github.com\/users\/Fischer-love-fish\/repos","events_url":"https:\/\/api.github.com\/users\/Fischer-love-fish\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Fischer-love-fish\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-14T15:02:15Z","updated_at":"2021-12-21T11:02:06Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"when I rerun my program, it occurs this error\r\n\" Unable to resolve any data file that matches '['**train*']' at \/data2\/whr\/lzy\/open_domain_data\/retrieval\/wiki_dpr with any supported extension ['csv', 'tsv', 'json', 'jsonl', 'parquet', 'txt', 'zip']\", so how could i deal with this problem?\r\nthx.\r\nAnd below is my code .\r\n![image](https:\/\/user-images.githubusercontent.com\/84694183\/146023446-d75fdec8-65c1-484f-80d8-6c20ff5e994b.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3431\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3431\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3430","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3430\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3430\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3430\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3430","id":1079811124,"node_id":"PR_kwDODunzps4v033w","number":3430,"title":"Make decoding of Audio and Image feature 
optional","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-12-14T14:15:08Z","updated_at":"2022-01-25T18:57:52Z","closed_at":"2022-01-25T18:57:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3430","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3430","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3430.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3430.patch","merged_at":"2022-01-25T18:57:52Z"},"body":"Add the `decode` argument (`True` by default) to the `Audio` and the `Image` feature to make it possible to toggle on\/off decoding of these features.\r\n\r\nEven though we've discussed that on Slack, I'm not removing the `_storage_dtype` argument of the Audio feature in this PR to avoid breaking the Audio feature tests.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3430\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3430\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3429","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3429\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3429\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3429\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3429","id":1078902390,"node_id":"PR_kwDODunzps4vx1gp","number":3429,"title":"Make cast cacheable (again) on 
Windows","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-13T19:32:02Z","updated_at":"2021-12-14T14:39:51Z","closed_at":"2021-12-14T14:39:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3429","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3429","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3429.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3429.patch","merged_at":"2021-12-14T14:39:50Z"},"body":"`cast` currently emits the following warning when called on Windows:\r\n```\r\nParameter 'function'=. at 0x000001C930571EA0> of the transform datasets.arrow_dataset.Dataset._map_single couldn't be hashed properly, a random hash was used instead. Make sure your transforms and parameters are serializable with pickle or dill for the dataset fingerprinting \r\nand caching to work. If you reuse this transform, the caching mechanism will consider it to be different \r\nfrom the previous calls and recompute everything. This warning is only showed once. 
Subsequent hashing failures won't be showed.\r\n```\r\n\r\nIt seems like the issue stems from the `config.PYARROW_VERSION` object not being serializable on Windows (tested with `dumps(lambda: config.PYARROW_VERSION)`), so I'm fixing this by capturing `config.PYARROW_VERSION.major` before the lambda definition.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3429\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3429\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3428","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3428\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3428\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3428\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3428","id":1078863468,"node_id":"PR_kwDODunzps4vxtNT","number":3428,"title":"Clean squad dummy data","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-13T18:46:29Z","updated_at":"2021-12-13T18:57:50Z","closed_at":"2021-12-13T18:57:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3428","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3428","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3428.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3428.patch","merged_at":"2021-12-13T18:57:50Z"},"body":"Some unused files were remaining, this PR removes them. 
We just need to keep the dummy_data.zip file","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3428\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3428\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3427","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3427\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3427\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3427\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3427","id":1078782159,"node_id":"PR_kwDODunzps4vxb_y","number":3427,"title":"Add The Pile Enron Emails subset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-13T17:14:16Z","updated_at":"2021-12-14T17:30:59Z","closed_at":"2021-12-14T17:30:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3427","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3427","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3427.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3427.patch","merged_at":"2021-12-14T17:30:55Z"},"body":"Add:\r\n- Enron Emails subset of The Pile: \"enron_emails\" config\r\n\r\nClose bigscience-workshop\/data_tooling#310.\r\n\r\nCC: @StellaAthena","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3427\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3427\/timeline","performed_via_github_app":null} 
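A hedged illustration, not part of the PR 3427 record above: its body states that an "enron_emails" config of The Pile is being added, so loading that subset would presumably look like the sketch below. The dataset script name "the_pile", the availability of only a "train" split, and the "text" field are assumptions inferred from how The Pile subsets are usually exposed, not confirmed by the record itself.

```python
from datasets import load_dataset

# Hedged sketch: assumes the loader is named "the_pile", that the new config
# from PR 3427 is called "enron_emails" (as stated in the PR body), and that
# each example carries a "text" field with a single train split.
enron_emails = load_dataset("the_pile", "enron_emails", split="train")
print(enron_emails[0]["text"][:200])
```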
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3426","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3426\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3426\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3426\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3426","id":1078670031,"node_id":"PR_kwDODunzps4vxEN5","number":3426,"title":"Update disaster_response_messages download urls (+ add validation split)","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-13T15:30:12Z","updated_at":"2021-12-14T14:38:30Z","closed_at":"2021-12-14T14:38:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3426","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3426","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3426.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3426.patch","merged_at":"2021-12-14T14:38:29Z"},"body":"Fixes #3240, fixes #3416","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3426\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3426\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3425","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3425\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3425\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3425\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3425","id":1078598140,"node_id":"I_kwDODunzps5AShn8","number":3425,"title":"Getting configs names takes too 
long","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-12-13T14:27:57Z","updated_at":"2021-12-13T14:53:33Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import get_dataset_config_names\r\nget_dataset_config_names(\"allenai\/c4\")\r\n```\r\n\r\n## Expected 
results\r\n\r\nI would expect to get the answer quickly, at least in less than 10s\r\n\r\n## Actual results\r\n\r\nIt takes about 45s on my environment\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.16.1\r\n- Platform: Linux-5.11.0-1022-aws-x86_64-with-glibc2.31\r\n- Python version: 3.9.6\r\n- PyArrow version: 4.0.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3425\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3425\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3424","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3424\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3424\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3424\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3424","id":1078543625,"node_id":"PR_kwDODunzps4vwpNt","number":3424,"title":"Add RedCaps dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-13T13:38:13Z","updated_at":"2022-01-12T14:13:16Z","closed_at":"2022-01-12T14:13:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3424","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3424","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3424.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3424.patch","merged_at":"2022-01-12T14:13:15Z"},"body":"Add the RedCaps dataset. I'm not adding the generated `dataset_infos.json` file for now due to its size (11 MB). 
\r\n\r\nTODOs:\r\n- [x] dummy data\r\n- [x] dataset card\r\n\r\nClose #3316 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3424\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3424\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3423","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3423\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3423\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3423\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3423","id":1078049638,"node_id":"I_kwDODunzps5AQbtm","number":3423,"title":"data duplicate when setting num_works > 1 with streaming data","user":{"login":"cloudyuyuyu","id":16486492,"node_id":"MDQ6VXNlcjE2NDg2NDky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16486492?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cloudyuyuyu","html_url":"https:\/\/github.com\/cloudyuyuyu","followers_url":"https:\/\/api.github.com\/users\/cloudyuyuyu\/followers","following_url":"https:\/\/api.github.com\/users\/cloudyuyuyu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cloudyuyuyu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cloudyuyuyu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cloudyuyuyu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cloudyuyuyu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cloudyuyuyu\/repos","events_url":"https:\/\/api.github.com\/users\/cloudyuyuyu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cloudyuyuyu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":3287858981,"node_id":"MDU6TGFiZWwzMjg3ODU4OTgx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/streaming","name":"streaming","color":"fef2c0","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-12-13T03:43:17Z","updated_at":"2021-12-14T10:53:30Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe data is repeated num_works times when we load_dataset with streaming and set num_works > 1 when construct dataloader\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nimport pandas as pd\r\nimport numpy as np\r\nimport os\r\n\r\nfrom datasets import load_dataset\r\nfrom torch.utils.data import DataLoader\r\nfrom tqdm import tqdm\r\nimport shutil\r\n\r\nNUM_OF_USER = 1000000\r\nNUM_OF_ACTION = 50000\r\nNUM_OF_SEQUENCE = 10000\r\nNUM_OF_FILES = 32\r\nNUM_OF_WORKERS = 16\r\n\r\nif __name__ == \"__main__\":\r\n shutil.rmtree(\".\/dataset\")\r\n for i in range(NUM_OF_FILES):\r\n sequence_data = pd.DataFrame(\r\n {\r\n \"imei\": np.random.randint(1, NUM_OF_USER, size=NUM_OF_SEQUENCE),\r\n \"sequence\": 
np.random.randint(1, NUM_OF_ACTION, size=NUM_OF_SEQUENCE)\r\n }\r\n )\r\n\r\n if not os.path.exists(\".\/dataset\"):\r\n os.makedirs(\".\/dataset\")\r\n\r\n sequence_data.to_csv(f\".\/dataset\/sequence_data_{i}.csv\",\r\n \r\n index=False)\r\n\r\n dataset = load_dataset(\"csv\",\r\n data_files=[os.path.join(\".\/dataset\",file) for file in os.listdir(\".\/dataset\") if file.endswith(\".csv\")],\r\n split=\"train\",\r\n streaming=True).with_format(\"torch\")\r\n data_loader = DataLoader(dataset,\r\n batch_size=1024,\r\n num_workers=NUM_OF_WORKERS)\r\n \r\n result = pd.DataFrame()\r\n for i, batch in tqdm(enumerate(data_loader)):\r\n result = pd.concat([result, \r\n pd.DataFrame(batch)],\r\n axis=0)\r\n result.to_csv(f\"num_work_{NUM_OF_WORKERS}.csv\", index=False)\r\n\r\n```\r\n\r\n## Expected results\r\ndata do not duplicate \r\n\r\n## Actual results\r\ndata duplicate NUM_OF_WORKERS = 16 \r\n![image](https:\/\/user-images.githubusercontent.com\/16486492\/145748707-9d2df25b-2f4f-4d7b-a83e-242be4fc8934.png)\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version:datasets==1.14.0\r\n- Platform:transformers==4.11.3\r\n- Python version:3.8\r\n- PyArrow version:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3423\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3423\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3422","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3422\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3422\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3422\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3422","id":1078022619,"node_id":"I_kwDODunzps5AQVHb","number":3422,"title":"Error about load_metric","user":{"login":"jiacheng-ye","id":30772464,"node_id":"MDQ6VXNlcjMwNzcyNDY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30772464?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jiacheng-ye","html_url":"https:\/\/github.com\/jiacheng-ye","followers_url":"https:\/\/api.github.com\/users\/jiacheng-ye\/followers","following_url":"https:\/\/api.github.com\/users\/jiacheng-ye\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jiacheng-ye\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jiacheng-ye\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jiacheng-ye\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jiacheng-ye\/orgs","repos_url":"https:\/\/api.github.com\/users\/jiacheng-ye\/repos","events_url":"https:\/\/api.github.com\/users\/jiacheng-ye\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jiacheng-ye\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-13T02:49:51Z","updated_at":"2022-01-07T14:06:47Z","closed_at":"2022-01-07T14:06:47Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1371, in load_metric\r\n metric = metric_cls(\r\nTypeError: 'NoneType' object is not callable\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nmetric = load_metric(\"glue\", \"sst2\")\r\n```\r\n\r\n\r\n## Environment info\r\n- `datasets` version: 1.16.1\r\n- Platform: Linux-4.15.0-161-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.3\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3422\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3422\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3421","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3421\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3421\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3421\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3421","id":1077966571,"node_id":"PR_kwDODunzps4vuvJK","number":3421,"title":"Adding mMARCO dataset","user":{"login":"lhbonifacio","id":17603035,"node_id":"MDQ6VXNlcjE3NjAzMDM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17603035?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhbonifacio","html_url":"https:\/\/github.com\/lhbonifacio","followers_url":"https:\/\/api.github.com\/users\/lhbonifacio\/followers","following_url":"https:\/\/api.github.com\/users\/lhbonifacio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhbonifacio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhbonifacio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhbonifacio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhbonifacio\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhbonifacio\/repos","events_url":"https:\/\/api.github.com\/users\/lhbonifacio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhbonifacio\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-12-13T00:56:43Z","updated_at":"2022-01-13T10:06:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3421","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3421","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3421.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3421.patch","merged_at":null},"body":"Adding mMARCO (v1.1) to HF 
datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3421\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3421\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3420","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3420\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3420\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3420\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3420","id":1077913468,"node_id":"PR_kwDODunzps4vukyD","number":3420,"title":"Add eli5_category dataset","user":{"login":"jingshenSN2","id":40377373,"node_id":"MDQ6VXNlcjQwMzc3Mzcz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40377373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jingshenSN2","html_url":"https:\/\/github.com\/jingshenSN2","followers_url":"https:\/\/api.github.com\/users\/jingshenSN2\/followers","following_url":"https:\/\/api.github.com\/users\/jingshenSN2\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jingshenSN2\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jingshenSN2\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jingshenSN2\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jingshenSN2\/orgs","repos_url":"https:\/\/api.github.com\/users\/jingshenSN2\/repos","events_url":"https:\/\/api.github.com\/users\/jingshenSN2\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jingshenSN2\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-12T21:30:45Z","updated_at":"2021-12-14T17:53:03Z","closed_at":"2021-12-14T17:53:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3420","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3420","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3420.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3420.patch","merged_at":"2021-12-14T17:53:02Z"},"body":"This pull request adds a categorized Long-form question answering dataset `ELI5_Category`. It's a new variant of the [ELI5](https:\/\/huggingface.co\/datasets\/eli5) dataset that uses the Reddit tags to alleviate the training\/validation overlapping in the origin ELI5 dataset.\r\n\r\nA [report](https:\/\/celeritasml.netlify.app\/posts\/2021-12-01-eli5c\/)(Section 2) on this dataset. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3420\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3420\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3419","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3419\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3419\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3419\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3419","id":1077350974,"node_id":"I_kwDODunzps5ANxI-","number":3419,"title":"`.to_json` is extremely slow after `.select`","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-12-11T01:36:31Z","updated_at":"2021-12-21T15:49:07Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nSaving a dataset to JSON with `to_json` is extremely slow after using `.select` on the original dataset.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\noriginal = load_dataset(\"squad\", split=\"train\")\r\noriginal.to_json(\"from_original.json\") # Takes 0 seconds\r\n\r\nselected_subset1 = original.select([i for i in range(len(original))])\r\nselected_subset1.to_json(\"from_select1.json\") # Takes 212 seconds\r\n\r\nselected_subset2 = original.select([i for i in range(int(len(original) \/ 2))])\r\nselected_subset2.to_json(\"from_select2.json\") # Takes 90 seconds\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: master (https:\/\/github.com\/huggingface\/datasets\/commit\/6090f3cfb5c819f441dd4a4bb635e037c875b044)\r\n- Platform: Linux-4.4.0-19041-Microsoft-x86_64-with-glibc2.27\r\n- Python version: 3.9.7\r\n- PyArrow version: 
6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3419\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3419\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3418","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3418\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3418\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3418\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3418","id":1077053296,"node_id":"PR_kwDODunzps4vsHMK","number":3418,"title":"Add Wikisource dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-10T17:04:44Z","updated_at":"2022-02-08T12:19:14Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3418","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3418","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3418.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3418.patch","merged_at":null},"body":"Add loading script for Wikisource dataset.\r\n\r\nFix #3399.\r\n\r\nCC: @geohci, @yjernite","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3418\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3418\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3417","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3417\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3417\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3417\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3417","id":1076943343,"node_id":"PR_kwDODunzps4vrwd7","number":3417,"title":"Fix type of bridge field in QED","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-10T15:07:21Z","updated_at":"2021-12-14T14:39:06Z","closed_at":"2021-12-14T14:39:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3417","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3417","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3417.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3417.patch","merged_at":"2021-12-14T14:39:05Z"},"body":"Use `Value(\"string\")` instead of `Value(\"bool\")` for the feature type of the `\"bridge\"` field in the QED dataset. If the value is `False`, set to `None`.\r\n\r\nThe following paragraph in the QED repo explains the purpose of this field:\r\n>Each annotation in referential_equalities is a pair of spans, the question_reference and the sentence_reference, corresponding to an entity mention in the question and the selected_sentence respectively. As described in the paper, sentence_references can be \"bridged in\", in which case they do not correspond with any actual span in the selected_sentence. Hence, sentence_reference spans contain an additional field, bridge, which is a prepositional phrase when a reference is bridged, and is False otherwise. Prepositional phrases serve to link bridged references to an anchoring phrase in the selected_sentence. 
In the case a sentence_reference is bridged, the start and end, as well as the span string, map to such an anchoring phrase in the selected_sentence.\r\n\r\nFix #3346\r\n\r\ncc @VictorSanh ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3417\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3417\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3416","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3416\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3416\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3416\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3416","id":1076868771,"node_id":"I_kwDODunzps5AL7aj","number":3416,"title":"disaster_response_messages unavailable","user":{"login":"sacdallago","id":6240943,"node_id":"MDQ6VXNlcjYyNDA5NDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6240943?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sacdallago","html_url":"https:\/\/github.com\/sacdallago","followers_url":"https:\/\/api.github.com\/users\/sacdallago\/followers","following_url":"https:\/\/api.github.com\/users\/sacdallago\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sacdallago\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sacdallago\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sacdallago\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sacdallago\/orgs","repos_url":"https:\/\/api.github.com\/users\/sacdallago\/repos","events_url":"https:\/\/api.github.com\/users\/sacdallago\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sacdallago\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-10T13:49:17Z","updated_at":"2021-12-14T14:38:29Z","closed_at":"2021-12-14T14:38:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '* disaster_response_messages*'\r\n\r\n**Link:** https:\/\/huggingface.co\/datasets\/disaster_response_messages\r\n\r\nDataset unavailable. 
Link dead: https:\/\/datasets.appen.com\/appen_datasets\/disaster_response_data\/disaster_response_messages_training.csv\r\n\r\nAm I the one who added this dataset ?No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3416\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3416\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3415","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3415\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3415\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3415\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3415","id":1076472534,"node_id":"I_kwDODunzps5AKarW","number":3415,"title":"Non-deterministic tests: CI tests randomly fail","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-10T06:08:59Z","updated_at":"2021-12-14T14:01:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nSome CI tests fail randomly.\r\n\r\n1. In https:\/\/github.com\/huggingface\/datasets\/pull\/3375\/commits\/c10275fe36085601cb7bdb9daee9a8f1fc734f48, there were 3 failing tests, only on Linux:\r\n ```\r\n =========================== short test summary info ============================\r\n FAILED tests\/test_streaming_download_manager.py::test_streaming_dl_manager_get_extraction_protocol[https:\/\/drive.google.com\/uc?export=download&id=1k92sUfpHxKq8PXWRr7Y5aNHXwOCNUmqh-zip]\r\n FAILED tests\/test_streaming_download_manager.py::test_streaming_gg_drive - Fi...\r\n FAILED tests\/test_streaming_download_manager.py::test_streaming_gg_drive_zipped\r\n = 3 failed, 3553 passed, 2950 skipped, 2 xfailed, 1 xpassed, 125 warnings in 192.79s (0:03:12) =\r\n ```\r\n\r\n2. 
After re-running the CI (without any change in the code) in https:\/\/github.com\/huggingface\/datasets\/pull\/3375\/commits\/57bfe1f342cd3c59d2510b992d5f06a0761eb147, there was only 1 failing test (one on Linux and a different one on Windows):\r\n - On Linux:\r\n ```\r\n =========================== short test summary info ============================\r\n FAILED tests\/test_streaming_download_manager.py::test_streaming_gg_drive_zipped\r\n = 1 failed, 3555 passed, 2950 skipped, 2 xfailed, 1 xpassed, 125 warnings in 199.76s (0:03:19) =\r\n ```\r\n - On Windows:\r\n ```\r\n =========================== short test summary info ===========================\r\n FAILED tests\/test_load.py::test_load_dataset_builder_for_community_dataset_without_script\r\n = 1 failed, 3551 passed, 2954 skipped, 2 xfailed, 1 xpassed, 121 warnings in 478.58s (0:07:58) =\r\n ```\r\n\r\n The test `tests\/test_streaming_download_manager.py::test_streaming_gg_drive_zipped` passes locally.\r\n\r\n3. After re-running again the CI (without any change in the code) in https:\/\/github.com\/huggingface\/datasets\/pull\/3375\/commits\/39f32f2119cf91b86867216bb5c356c586503c6a, ALL the tests passed.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3415\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3415\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3414","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3414\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3414\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3414\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3414","id":1076028998,"node_id":"PR_kwDODunzps4voyaq","number":3414,"title":"Skip None encoding (line deleted by accident in 
#3195)","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-09T21:17:33Z","updated_at":"2021-12-10T11:00:03Z","closed_at":"2021-12-10T11:00:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3414","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3414","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3414.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3414.patch","merged_at":"2021-12-10T11:00:02Z"},"body":"Return the line deleted by accident in #3195 while [resolving merge conflicts](https:\/\/github.com\/huggingface\/datasets\/pull\/3195\/commits\/8b0ed15be08559056b817836a07d47acda0c4510).\r\n\r\n\r\nFix #3181 (finally :))\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3414\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3414\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3413","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3413\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3413\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3413\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3413","id":1075854325,"node_id":"PR_kwDODunzps4voNZv","number":3413,"title":"Add WIDER FACE 
dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-09T18:03:38Z","updated_at":"2022-01-12T14:13:47Z","closed_at":"2022-01-12T14:13:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3413","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3413","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3413.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3413.patch","merged_at":"2022-01-12T14:13:47Z"},"body":"Adds the WIDER FACE face detection benchmark.\r\n\r\nTODOs:\r\n* [x] dataset card\r\n* [x] dummy data ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3413\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3413\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3412","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3412\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3412\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3412\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3412","id":1075846368,"node_id":"PR_kwDODunzps4voLs4","number":3412,"title":"Fix flaky test again for s3 
serialization","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-09T17:54:41Z","updated_at":"2021-12-09T18:00:52Z","closed_at":"2021-12-09T18:00:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3412","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3412","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3412.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3412.patch","merged_at":"2021-12-09T18:00:52Z"},"body":"Following https:\/\/github.com\/huggingface\/datasets\/pull\/3388 that wasn't enough (see CI error [here](https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/9080\/workflows\/b971fb27-ff20-4220-9416-c19acdfdf6f4\/jobs\/55985))","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3412\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3412\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3411","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3411\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3411\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3411\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3411","id":1075846272,"node_id":"I_kwDODunzps5AIByA","number":3411,"title":"[chinese wwm] load_datasets behavior not as expected when using run_mlm_wwm.py 
script","user":{"login":"hyusterr","id":52968111,"node_id":"MDQ6VXNlcjUyOTY4MTEx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52968111?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hyusterr","html_url":"https:\/\/github.com\/hyusterr","followers_url":"https:\/\/api.github.com\/users\/hyusterr\/followers","following_url":"https:\/\/api.github.com\/users\/hyusterr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hyusterr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hyusterr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hyusterr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hyusterr\/orgs","repos_url":"https:\/\/api.github.com\/users\/hyusterr\/repos","events_url":"https:\/\/api.github.com\/users\/hyusterr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hyusterr\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-09T17:54:35Z","updated_at":"2021-12-22T11:21:33Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nModel I am using (Bert, XLNet ...): bert-base-chinese\r\n\r\nThe problem arises when using:\r\n* [https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/research_projects\/mlm_wwm\/run_mlm_wwm.py] the official example scripts: `rum_mlm_wwm.py`\r\n\r\nThe tasks I am working on is: pretraining whole word masking with my own dataset and ref.json file\r\nI tried follow the run_mlm_wwm.py procedure to do whole word masking on pretraining task. my file is in .txt form, where one line represents one sample, with `9,264,784` chinese lines in total. the ref.json file is also contains 9,264,784 lines of whole word masking reference data for my chinese corpus. but when I try to adapt the run_mlm_wwm.py script, it shows that somehow after \r\n`datasets[\"train\"] = load_dataset(...`\r\n`len(datasets[\"train\"])` returns `9,265,365`\r\nthen, after `tokenized_datasets = datasets.map(...`\r\n`len(tokenized_datasets[\"train\"])` returns `9,265,279`\r\nI'm really confused and tried to trace code by myself but can't know what happened after a week trial.\r\n\r\nI want to know what happened in the `load_dataset()` function and `datasets.map` here and how did I get more lines of data than I input. so I'm here to ask.\r\n## To reproduce\r\nSorry that I can't provide my data here since it did not belong to me. but I'm sure I remove the blank lines.\r\n\r\n## Expected behavior\r\nI expect the code run as it should. 
but the AssertionError in line 167 keeps raise as the line of reference json and datasets['train'] differs.\r\n\r\nThanks for your patient reading!\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-5.4.0-91-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10 \r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3411\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3411\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3410","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3410\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3410\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3410\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3410","id":1075815415,"node_id":"PR_kwDODunzps4voFG7","number":3410,"title":"Fix dependencies conflicts in Windows CI after conda update to 4.11","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-09T17:19:11Z","updated_at":"2021-12-09T17:36:20Z","closed_at":"2021-12-09T17:36:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3410","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3410","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3410.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3410.patch","merged_at":"2021-12-09T17:36:19Z"},"body":"For some reason the CI wasn't using python 3.6 but python 3.7 after the update to conda 4.11","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3410\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3410\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3409","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3409\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3409\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3409\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3409","id":1075684593,"node_id":"PR_kwDODunzps4vnpU0","number":3409,"title":"Pass new_fingerprint in multiprocessing","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-09T15:12:00Z","updated_at":"2021-12-09T17:38:44Z","closed_at":"2021-12-09T17:38:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3409","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3409","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3409.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3409.patch","merged_at":"2021-12-09T17:38:43Z"},"body":"Following https:\/\/github.com\/huggingface\/datasets\/pull\/3045\r\n\r\nCurrently one can pass `new_fingerprint` to `.map()` to use a custom fingerprint instead of the one computed by hashing the map transform. 
However it's ignored if `num_proc>1`.\r\n\r\nIn this PR I fixed that by passing `new_fingerprint` to `._map_single()` when `num_proc>1`.\r\nMore specifically, `new_fingerprint` with a suffix based on the process `rank` is passed, so that each process has a different `new_fingerprint`\r\n\r\ncc @TevenLeScao @vlievin","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3409\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3409\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3408","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3408\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3408\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3408\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3408","id":1075642915,"node_id":"I_kwDODunzps5AHQIj","number":3408,"title":"Typo in Dataset viewer error message","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on 
huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"assignees":[{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-12-09T14:34:02Z","updated_at":"2021-12-22T11:02:53Z","closed_at":"2021-12-22T11:02:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*name of the dataset*'\r\n\r\n**Link:** *link to the dataset viewer page*\r\n\r\n*short description of the issue*\r\n\r\nWhen creating an empty dataset repo, the Dataset Preview provides a helpful message that no files were found. There is a tiny typo in that message: \"ressource\" should be \"resource\"\r\n\r\n![Screen Shot 2021-12-09 at 15 31 31](https:\/\/user-images.githubusercontent.com\/26859204\/145415725-9cd728f0-c2c8-4b4e-a8e1-4f4d7841c94a.png)\r\n\r\n\r\nAm I the one who added this dataset ? 
\r\n\r\nN\/A\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3408\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3408\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3407","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3407\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3407\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3407\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3407","id":1074502225,"node_id":"PR_kwDODunzps4vjyrB","number":3407,"title":"Use max number of data files to infer module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-08T14:58:43Z","updated_at":"2021-12-14T17:08:42Z","closed_at":"2021-12-14T17:08:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3407","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3407","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3407.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3407.patch","merged_at":"2021-12-14T17:08:41Z"},"body":"When inferring the module for datasets without script, set a maximum number of iterations over data files.\r\n\r\nThis PR fixes the issue of taking too long when hundred of data files present.\r\n\r\nPlease, feel free to agree on both numbers:\r\n```\r\n# Datasets without script\r\nDATA_FILES_MAX_NUMBER = 10\r\nARCHIVED_DATA_FILES_MAX_NUMBER = 5\r\n```\r\n\r\nFix #3404.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3407\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3407\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3406","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3406\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3406\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3406\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3406","id":1074366050,"node_id":"PR_kwDODunzps4vjV21","number":3406,"title":"Fix module inference for archive with a directory","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-08T12:39:12Z","updated_at":"2021-12-08T13:03:30Z","closed_at":"2021-12-08T13:03:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3406","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3406","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3406.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3406.patch","merged_at":"2021-12-08T13:03:28Z"},"body":"Fix module inference for an archive file that contains files within a directory.\r\n\r\nFix #3405.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3406\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3406\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3405","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3405\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3405\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3405\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3405","id":1074360362,"node_id":"I_kwDODunzps5ACXAq","number":3405,"title":"ZIP format inference does not work when files located in a dir inside the 
archive","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-12-08T12:32:15Z","updated_at":"2021-12-08T13:03:29Z",
"closed_at":"2021-12-08T13:03:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen a zipped file contains archived files within a directory, the function `infer_module_for_data_files_in_archives` does not work.\r\n\r\nIt only works for files located in the root directory of the ZIP file.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ninfer_module_for_data_files_in_archives([\"path\/to\/zip\/file.zip\"], False)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3405\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3405\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3404","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3404\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3404\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3404\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3404","id":1073657561,"node_id":"I_kwDODunzps4__rbZ","number":3404,"title":"Optimize ZIP format inference","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-12-07T18:44:49Z","updated_at":"2021-12-14T17:08:41Z","closed_at":"2021-12-14T17:08:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nWhen hundreds of ZIP files are present in a dataset, format inference takes too long.\r\n\r\nSee: https:\/\/github.com\/bigscience-workshop\/data_tooling\/issues\/232#issuecomment-986685497\r\n\r\n**Describe the solution you'd like**\r\nIterate over a maximum number of files.\r\n\r\nCC: @lhoestq \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3404\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3404\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3403","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3403\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3403\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3403\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3403","id":1073622120,"node_id":"I_kwDODunzps4__ixo","number":3403,"title":"Cannot import name 'maybe_sync'","user":{"login":"KMFODA","id":35491698,"node_id":"MDQ6VXNlcjM1NDkxNjk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35491698?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KMFODA","html_url":"https:\/\/github.com\/KMFODA","followers_url":"https:\/\/api.github.com\/users\/KMFODA\/followers","following_url":"https:\/\/api.github.com\/users\/KMFODA\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KMFODA\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KMFODA\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KMFODA\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KMFODA\/orgs","repos_url":"https:\/\/api.github.com\/users\/KMFODA\/repos","events_url":"https:\/\/api.github.com\/users\/KMFODA\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KMFODA\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-12-07T17:57:59Z","updated_at":"2021-12-17T07:00:35Z","closed_at":"2021-12-17T07:00:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nCannot seem to import datasets when running run_summarizer.py script on a VM set up on ovhcloud\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n```\r\n\r\n## Expected results\r\nNo error\r\n\r\n## Actual results\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/__init__.py\", line 34, in \r\n from .arrow_dataset import Dataset, concatenate_datasets\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 48, in \r\n from .arrow_writer import ArrowWriter, OptimizedTypedSequence\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 27, in \r\n from .features import (\r\n File 
\"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/features\/__init__.py\", line 2, in \r\n from .audio import Audio\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/features\/audio.py\", line 8, in \r\n from ..utils.streaming_download_manager import xopen\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/utils\/streaming_download_manager.py\", line 16, in \r\n from ..filesystems import COMPRESSION_FILESYSTEMS\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/filesystems\/__init__.py\", line 13, in \r\n from .s3filesystem import S3FileSystem # noqa: F401\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/filesystems\/s3filesystem.py\", line 1, in \r\n import s3fs\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/s3fs\/__init__.py\", line 1, in \r\n from .core import S3FileSystem, S3File\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/s3fs\/core.py\", line 11, in \r\n from fsspec.asyn import AsyncFileSystem, sync, sync_wrapper, maybe_sync\r\nImportError: cannot import name 'maybe_sync' from 'fsspec.asyn' (\/opt\/conda\/lib\/python3.7\/site-packages\/fsspec\/asyn.py)\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.16.0\r\n- Platform: OVH Cloud Tesla V100 Machine\r\n- Python version: 3.7.9\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3403\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3403\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3402","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3402\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3402\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3402\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3402","id":1073614815,"node_id":"PR_kwDODunzps4vg5Ff","number":3402,"title":"More robust first elem check in encode\/cast 
example","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-07T17:48:16Z","updated_at":"2021-12-08T13:02:16Z","closed_at":"2021-12-08T13:02:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3402","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3402","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3402.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3402.patch","merged_at":"2021-12-08T13:02:15Z"},"body":"Fix #3306 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3402\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3402\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3401","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3401\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3401\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3401\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3401","id":1073603508,"node_id":"I_kwDODunzps4__eO0","number":3401,"title":"Add Wikimedia pre-processed 
datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-07T17:33:19Z","updated_at":"2021-12-07T17:43:37Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Add pre-processed data to:\r\n - *wikimedia\/wikipedia*: https:\/\/huggingface.co\/datasets\/wikimedia\/wikipedia\r\n - *wikimedia\/wikisource*: https:\/\/huggingface.co\/datasets\/wikimedia\/wikisource\r\n- **Description:** Add pre-processed data to the Hub for all languages\r\n- **Paper:** *link to the dataset paper if available*\r\n- **Data:** *link to the Github repository or current dataset location*\r\n- **Motivation:** This will be very useful for the NLP community, as the pre-processing has a high cost for lot of researchers (both in computation and in knowledge)\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nCC: @geohci, @yjernite","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3401\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3401\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3400","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3400\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3400\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3400\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3400","id":1073600382,"node_id":"I_kwDODunzps4__dd-","number":3400,"title":"Improve Wikipedia loading 
script","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-07T17:29:25Z","updated_at":"2021-12-07T20:25:51Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As reported by @geohci, the \"wikipedia\" processing\/loading script could be improved by some additional small suggested processing functions:\r\n- _extract_content(filepath):\r\n - Replace .startswith(\"#redirect\") with more structured approach: if elem.find(f\".\/{namespace}redirect\") is None: continue\r\n- _parse_and_clean_wikicode(raw_content, parser):\r\n - Remove rm_template from cleaning -- this is redundant with .strip_code() from mwparserformhell\r\n - Build a language-specific list of namespace prefixes to filter out per below get_namespace_prefixes\r\n - Optional: strip prefixes like categories -- e.g., Category:Towns in Tianjin becomes Towns in Tianjin\r\n - Optional: strip magic words\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3400\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3400\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3399","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3399\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3399\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3399\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3399","id":1073593861,"node_id":"I_kwDODunzps4__b4F","number":3399,"title":"Add Wikisource 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-12-07T17:21:31Z","up
dated_at":"2021-12-10T17:26:26Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *wikisource*\r\n- **Description:** *short description of the dataset (or link to social media or blog post)*\r\n- **Paper:** *link to the dataset paper if available*\r\n- **Data:** *link to the Github repository or current dataset location*\r\n- **Motivation:** Additional high quality textual data, besides Wikipedia.\r\n\r\nAdd loading script as \"canonical\" dataset (as it is the case for \"\"wikipedia\").\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nCC: @geohci, @yjernite ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3399\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3399\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3398","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3398\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3398\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3398\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3398","id":1073590384,"node_id":"I_kwDODunzps4__bBw","number":3398,"title":"Add URL field to Wikimedia dataset instances: wikipedia,...","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-07T17:17:27Z","updated_at":"2021-12-07T17:28:12Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As reported by @geohci, once we will host pre-processed data in the Hub, we should add the full URL to data instances (new field \"url\") in order to conform to proper attribution from license requirement. 
See, e.g.: https:\/\/fair-trec.github.io\/docs\/Fair_Ranking_2021_Participant_Instructions.pdf#subsection.3.2\r\n\r\nThis should be done for all pre-processed datasets under \"wikimedia\" org in the Hub: https:\/\/huggingface.co\/wikimedia\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3398\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3398\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3397","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3397\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3397\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3397\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3397","id":1073502444,"node_id":"PR_kwDODunzps4vgh1U","number":3397,"title":"add BNL newspapers ","user":{"login":"davanstrien","id":8995957,"node_id":"MDQ6VXNlcjg5OTU5NTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8995957?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/davanstrien","html_url":"https:\/\/github.com\/davanstrien","followers_url":"https:\/\/api.github.com\/users\/davanstrien\/followers","following_url":"https:\/\/api.github.com\/users\/davanstrien\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/davanstrien\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/davanstrien\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/davanstrien\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/davanstrien\/orgs","repos_url":"https:\/\/api.github.com\/users\/davanstrien\/repos","events_url":"https:\/\/api.github.com\/users\/davanstrien\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/davanstrien\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-12-07T15:43:21Z","updated_at":"2022-01-17T18:35:34Z","closed_at":"2022-01-17T18:35:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3397","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3397","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3397.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3397.patch","merged_at":"2022-01-17T18:35:34Z"},"body":"This pull request adds the BNL's [processed newspaper collections](https:\/\/data.bnl.lu\/data\/historical-newspapers\/) as a dataset. This is partly done to support BigScience see: https:\/\/github.com\/bigscience-workshop\/data_tooling\/issues\/192. \r\n\r\nThe Datacard is more sparse than I would like but I plan to make a separate pull request to try and make this more complete at a later date. \r\n\r\nI had to manually add the `dummy_data` but I believe I've done this correctly (the tests pass locally). 
\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3397\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3397\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3396","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3396\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3396\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3396\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3396","id":1073467183,"node_id":"I_kwDODunzps4_-88v","number":3396,"title":"Install Audio dependencies to support audio decoding","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-07T15:11:36Z","updated_at":"2022-01-10T16:48:40Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*openslr*', '*projecte-aina\/parlament_parla*'\r\n\r\n**Link:** *https:\/\/huggingface.co\/datasets\/openslr*\r\n**Link:** *https:\/\/huggingface.co\/datasets\/projecte-aina\/parlament_parla*\r\n\r\nError:\r\n```\r\nStatus code: 400\r\nException: ImportError\r\nMessage: To support decoding audio files, please install 'librosa'.\r\n```\r\n\r\nAm I the one who added this dataset ? 
Yes-No\r\n- openslr: No\r\n- projecte-aina\/parlament_parla: Yes\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3396\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3396\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3395","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3395\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3395\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3395\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3395","id":1073432650,"node_id":"PR_kwDODunzps4vgTKG","number":3395,"title":"Fix formatting in IterableDataset.map docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-07T14:41:01Z","updated_at":"2021-12-08T10:11:33Z","closed_at":"2021-12-08T10:11:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3395","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3395","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3395.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3395.patch","merged_at":"2021-12-08T10:11:32Z"},"body":"Fix formatting in the recently added `Map` section of the streaming docs.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3395\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3395\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3394","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3394\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3394\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3394\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3394","id":1073396308,"node_id":"I_kwDODunzps4_-rpU","number":3394,"title":"Preserve all feature types when saving a dataset on the Hub with `push_to_hub`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-12-07T14:08:30Z","updated_at":"2021-12-21T17:00:09Z","closed_at":"2021-12-21T17:00:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, if one of the dataset features is of type `ClassLabel`, saving the dataset with `push_to_hub` and reloading the dataset with `load_dataset` will return the feature of type `Value`. 
To fix this, we should do something similar to `save_to_disk` (which correctly preserves the types) and not only push the parquet files in `push_to_hub`, but also the dataset `info` (stored in a JSON file).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3394\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3394\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3393","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3393\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3393\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3393\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3393","id":1073189777,"node_id":"I_kwDODunzps4_95OR","number":3393,"title":"Common Voice Belarusian Dataset","user":{"login":"wiedymi","id":42713027,"node_id":"MDQ6VXNlcjQyNzEzMDI3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42713027?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/wiedymi","html_url":"https:\/\/github.com\/wiedymi","followers_url":"https:\/\/api.github.com\/users\/wiedymi\/followers","following_url":"https:\/\/api.github.com\/users\/wiedymi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/wiedymi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/wiedymi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/wiedymi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/wiedymi\/orgs","repos_url":"https:\/\/api.github.com\/users\/wiedymi\/repos","events_url":"https:\/\/api.github.com\/users\/wiedymi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/wiedymi\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-07T10:37:02Z","updated_at":"2021-12-09T15:56:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *Common Voice Belarusian Dataset*\r\n- **Description:** *[commonvoice.mozilla.org\/be](https:\/\/commonvoice.mozilla.org\/be)*\r\n- **Data:** *[commonvoice.mozilla.org\/be\/datasets](https:\/\/commonvoice.mozilla.org\/be\/datasets)*\r\n- **Motivation:** *It has more than 7GB of data, so it will be great to have it in this package so anyone can try to train something for Belarusian language.*\r\n\r\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3393\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3393\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3392","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3392\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3392\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3392\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3392","id":1073073408,"node_id":"I_kwDODunzps4_9c0A","number":3392,"title":"Dataset viewer issue for `dansbecker\/hackernews_hiring_posts`","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-07T08:41:01Z","updated_at":"2021-12-07T14:04:28Z","closed_at":"2021-12-07T14:04:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for `dansbecker\/hackernews_hiring_posts`\r\n\r\n**Link:** https:\/\/huggingface.co\/datasets\/dansbecker\/hackernews_hiring_posts\r\n\r\n*short description of the issue*\r\n\r\nDataset preview not showing for uploaded DatasetDict. 
See https:\/\/discuss.huggingface.co\/t\/dataset-preview-not-showing-for-uploaded-datasetdict\/12603\r\n\r\nAm I the one who added this dataset ?\r\n\r\nNo -> @dansbecker","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3392\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3392\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3391","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3391\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3391\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3391\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3391","id":1072849055,"node_id":"I_kwDODunzps4_8mCf","number":3391,"title":"method to select columns","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-07T02:44:19Z","updated_at":"2021-12-07T02:45:27Z","closed_at":"2021-12-07T02:45:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\n* There is currently no way to select some columns of a dataset. 
In pandas, one can use `df[['col1', 'col2']]` to select columns, but in `datasets`, it results in error.\r\n\r\n**Describe the solution you'd like**\r\n\r\n* A new method that can be used to create a new dataset with only a list of specified columns.\r\n\r\n**Describe alternatives you've considered**\r\n\r\n`.remove_columns(self, columns: Union[str, List[str]], inverse: bool = False)`\r\nOr \r\n`.select(self, indices: Iterable = None, columns: List[str] = None)`\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3391\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3391\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3390","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3390\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3390\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3390\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3390","id":1072462456,"node_id":"I_kwDODunzps4_7Hp4","number":3390,"title":"Loading dataset throws \"KeyError: 'Field \"builder_name\" does not exist in table schema'\"","user":{"login":"R4ZZ3","id":25264037,"node_id":"MDQ6VXNlcjI1MjY0MDM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25264037?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/R4ZZ3","html_url":"https:\/\/github.com\/R4ZZ3","followers_url":"https:\/\/api.github.com\/users\/R4ZZ3\/followers","following_url":"https:\/\/api.github.com\/users\/R4ZZ3\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/R4ZZ3\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/R4ZZ3\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/R4ZZ3\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/R4ZZ3\/orgs","repos_url":"https:\/\/api.github.com\/users\/R4ZZ3\/repos","events_url":"https:\/\/api.github.com\/users\/R4ZZ3\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/R4ZZ3\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-06T18:22:49Z","updated_at":"2021-12-06T20:22:05Z","closed_at":"2021-12-06T20:22:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI have prepared dataset to datasets and now I am trying to load it back Finnish-NLP\/voxpopuli_fi\r\nI get \"KeyError: 'Field \"builder_name\" does not exist in table schema'\"\r\n\r\nMy dataset folder and files should be like @patrickvonplaten has here https:\/\/huggingface.co\/datasets\/flax-community\/german-common-voice-processed\r\n\r\nHow my voxpopuli dataset looks like:\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/144895598-b7d9ae91-b04a-4046-9f06-b71ff0824d13.png)\r\n\r\nPart of the processing (path column is the absolute path to audio 
files)\r\n```\r\ndef add_audio_column(example):\r\n example['audio'] = example['path']\r\n return example\r\n\r\nvoxpopuli = voxpopuli.map(add_audio_column)\r\nvoxpopuli.cast_column(\"audio\", Audio())\r\nvoxpopuli[\"audio\"] <-- to my knowledge this does load the local files and prepares those arrays\r\nvoxpopuli = voxpopuli.cast_column(\"audio\", Audio(sampling_rate=16_000)) resampling 16kHz\r\n```\r\n\r\nI have then saved it to disk_\r\n`voxpopuli.save_to_disk('\/asr_disk\/datasets_processed_new\/voxpopuli')`\r\n\r\nand made folder structure same as @patrickvonplaten \r\nI also get same error while trying to load_dataset from his repo:\r\n![image](https:\/\/user-images.githubusercontent.com\/25264037\/144895872-e9b8f326-cf2b-46cf-9417-606a0ce14077.png)\r\n\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndataset = load_dataset(\"Finnish-NLP\/voxpopuli_fi\")\r\n```\r\n\r\n## Expected results\r\nDataset is loaded correctly and looks like in the first picture\r\n\r\n## Actual results\r\nLoading throws keyError:\r\nKeyError: 'Field \"builder_name\" does not exist in table schema'\r\n\r\n\r\nResources I have been trying to follow:\r\nhttps:\/\/huggingface.co\/docs\/datasets\/audio_process.html \r\nhttps:\/\/huggingface.co\/docs\/datasets\/share_dataset.html\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.16.2.dev0\r\n- Platform: Ubuntu 20.04.2 LTS\r\n- Python version: 3.8.12\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3390\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3390\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3389","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3389\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3389\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3389\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3389","id":1072191865,"node_id":"I_kwDODunzps4_6Fl5","number":3389,"title":"Add EDGAR","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset 
request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-06T14:06:11Z","updated_at":"2021-12-06T14:06:21Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** EDGAR Database\r\n- **Description:** https:\/\/www.sec.gov\/edgar\/about EDGAR, the Electronic Data Gathering, Analysis, and Retrieval system, is the primary system for companies and others submitting documents under the Securities Act of 1933, the Securities Exchange Act of 1934, the Trust Indenture Act of 1939, and the Investment Company Act of 1940. Containing millions of company and individual filings, EDGAR benefits investors, corporations, and the U.S. economy overall by increasing the efficiency, transparency, and fairness of the securities markets. The system processes about 3,000 filings per day, serves up 3,000 terabytes of data to the public annually, and accommodates 40,000 new filers per year on average. EDGAR\u00ae and EDGARLink\u00ae are registered trademarks of the SEC.\r\n- **Data:** https:\/\/www.sec.gov\/os\/accessing-edgar-data\r\n- **Motivation:** Enabling and improving FSI (Financial Services Industry) datasets to increase ease of use \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3389\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3389\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3388","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3388\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3388\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3388\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3388","id":1072022021,"node_id":"PR_kwDODunzps4vbnyY","number":3388,"title":"Fix flaky test of the temporary directory used by 
load_from_disk","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-06T11:09:31Z","updated_at":"2021-12-06T11:25:03Z","closed_at":"2021-12-06T11:24:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3388","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3388","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3388.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3388.patch","merged_at":"2021-12-06T11:24:49Z"},"body":"The test is flaky, here is an example of random CI failure:\r\nhttps:\/\/github.com\/huggingface\/datasets\/commit\/73ed6615b4b3eb74d5311684f7b9e05cdb76c989\r\n\r\nI fixed that by not checking the content of the random part of the temporary directory name","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3388\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3388\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3387","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3387\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3387\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3387\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3387","id":1071836456,"node_id":"PR_kwDODunzps4vbAyC","number":3387,"title":"Create Language Modeling 
task","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-06T07:56:07Z","updated_at":"2021-12-17T17:18:28Z","closed_at":"2021-12-17T17:18:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3387","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3387","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3387.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3387.patch","merged_at":"2021-12-17T17:18:27Z"},"body":"Create Language Modeling task to be able to specify the input \"text\" column in a dataset.\r\n\r\nThis can be useful for datasets which are not exclusively used for language modeling and have more than one column:\r\n- for text classification datasets (with columns \"review\" and \"rating\", for example), the Language Modeling task can be used to specify the \"text\" column (\"review\" in this case).\r\n\r\nTODO:\r\n- [ ] Add the LanguageModeling task to all dataset scripts which can be used for language modeling","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3387\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3387\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3386","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3386\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3386\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3386\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3386","id":1071813141,"node_id":"PR_kwDODunzps4va7-2","number":3386,"title":"Fix typos in dataset 
cards","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-06T07:20:40Z","updated_at":"2021-12-06T09:30:55Z","closed_at":"2021-12-06T09:30:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3386","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3386","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3386.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3386.patch","merged_at":"2021-12-06T09:30:54Z"},"body":"This PR:\r\n- Fix typos in dataset cards\r\n- Fix Papers With Code ID for:\r\n - Bilingual Corpus of Arabic-English Parallel Tweets\r\n - Tweets Hate Speech Detection\r\n- Add pretty name tags","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3386\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3386\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3385","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3385\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3385\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3385\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3385","id":1071742310,"node_id":"I_kwDODunzps4_4X1m","number":3385,"title":"None batched `with_transform`, 
`set_transform`","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-12-06T05:20:54Z","updated_at":"2022-01-17T15:25:01Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nA `torch.utils.data.Dataset.__getitem__` operates on a single example.\r\nBut \ud83e\udd17 `Datasets.with_transform` doesn't seem to allow non-batched transform.\r\n\r\n**Describe the solution you'd like**\r\n\r\nHave a `batched=True` argument in `Datasets.with_transform`\r\n\r\n**Describe alternatives you've considered**\r\n\r\n* Convert a non-batched transform function to batched one myself. \r\n* Wrap a \ud83e\udd17 Dataset with torch Dataset, and add a `__getitem__`. \ud83d\ude44\r\n* Have `lazy=False` in `Dataset.map`, and returns a `LazyDataset` if `lazy=True`. 
This way the same `map` interface can be used, and existing code can be updated with one argument change.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3385\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3385\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3384","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3384\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3384\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3384\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3384","id":1071594165,"node_id":"PR_kwDODunzps4vaNwL","number":3384,"title":"Adding mMARCO dataset","user":{"login":"lhbonifacio","id":17603035,"node_id":"MDQ6VXNlcjE3NjAzMDM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17603035?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhbonifacio","html_url":"https:\/\/github.com\/lhbonifacio","followers_url":"https:\/\/api.github.com\/users\/lhbonifacio\/followers","following_url":"https:\/\/api.github.com\/users\/lhbonifacio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhbonifacio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhbonifacio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhbonifacio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhbonifacio\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhbonifacio\/repos","events_url":"https:\/\/api.github.com\/users\/lhbonifacio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhbonifacio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-05T23:59:11Z","updated_at":"2021-12-12T15:27:36Z","closed_at":"2021-12-12T15:27:36Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3384","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3384","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3384.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3384.patch","merged_at":null},"body":"We are adding mMARCO dataset to HuggingFace datasets repo.\r\nThis way, all the languages covered in the translation are available in a easy way.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3384\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3384\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3383","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3383\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3383\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3383\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3383","id":1071551884,"node_id":"PR_kwDODunzps4vaFpm","number":3383,"title":"add Georgian data in cc100.","user":{"login":"AnzorGozalishvili","id":55232459,"node_id":"MDQ6VXNlcjU1MjMyNDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55232459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AnzorGozalishvili","html_url":"https:\/\/github.com\/AnzorGozalishvili","followers_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/followers","following_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/orgs","repos_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/repos","events_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AnzorGozalishvili\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-05T20:38:09Z","updated_at":"2021-12-14T14:37:23Z","closed_at":"2021-12-14T14:37:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3383","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3383","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3383.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3383.patch","merged_at":"2021-12-14T14:37:22Z"},"body":"update cc100 dataset to support loading Georgian (ka) data which is originally available in CC100 dataset source.\r\nAll tests are passed. 
\r\nDummy data generated.\r\nmetadata generated.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3383\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3383\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3382","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3382\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3382\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3382\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3382","id":1071293299,"node_id":"PR_kwDODunzps4vZT2K","number":3382,"title":"#3337 Add typing overloads to Dataset.__getitem__ for mypy","user":{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-04T20:54:49Z","updated_at":"2021-12-14T10:28:55Z","closed_at":"2021-12-14T10:28:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3382","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3382","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3382.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3382.patch","merged_at":"2021-12-14T10:28:54Z"},"body":"Add typing overloads to Dataset.__getitem__ for mypy\r\n\r\nFixes #3337 \r\n\r\n**Iterable**\r\nIterable from `collections` cannot have a type, so you can't do `Iterable[int]` for example. `typing` has a Generic version that builds upon the one from `collections`.\r\n\r\n**Flake8**\r\nI had to add `# noqa: F811`, this is a bug from Flake8.\r\n\r\ndatasets uses flake8==3.7.9 which released in October 2019 if I update flake8 (4.0.1), I no longer get these errors, but I did not want to make the update without your approval. 
(It also triggers other errors like no args in f-strings.)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3382\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3382\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3381","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3381\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3381\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3381\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3381","id":1071283879,"node_id":"I_kwDODunzps4_2n6n","number":3381,"title":"Unable to load audio_features from common_voice dataset","user":{"login":"ashu5644","id":8268102,"node_id":"MDQ6VXNlcjgyNjgxMDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8268102?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ashu5644","html_url":"https:\/\/github.com\/ashu5644","followers_url":"https:\/\/api.github.com\/users\/ashu5644\/followers","following_url":"https:\/\/api.github.com\/users\/ashu5644\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ashu5644\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ashu5644\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ashu5644\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ashu5644\/orgs","repos_url":"https:\/\/api.github.com\/users\/ashu5644\/repos","events_url":"https:\/\/api.github.com\/users\/ashu5644\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ashu5644\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-12-04T19:59:11Z","updated_at":"2021-12-06T17:52:42Z","closed_at":"2021-12-06T17:52:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am not able to load audio features from common_voice dataset\r\n\r\n## Steps to reproduce the bug\r\n\r\n```\r\nfrom datasets import load_dataset\r\nimport torchaudio\r\n\r\ntest_dataset = load_dataset(\"common_voice\", \"hi\", split=\"test[:2%]\")\r\nresampler = torchaudio.transforms.Resample(48_000, 16_000)\r\n\r\ndef speech_file_to_array_fn(batch):\r\n speech_array, sampling_rate = torchaudio.load(batch[\"path\"])\r\n batch[\"speech\"] = resampler(speech_array).squeeze().numpy()\r\n return batch\r\ntest_dataset = test_dataset.map(speech_file_to_array_fn)\r\n```\r\n## Expected results\r\n\r\nThis piece of code should return test_dataset after loading audio features.\r\n\r\n## Actual results\r\n\r\nReusing dataset common_voice (\/home\/jovyan\/.cache\/huggingface\/datasets\/common_voice\/hi\/6.1.0\/b879a355caa529b11f2249400b61cadd0d9433f334d5c60f8c7216ccedfecfe1)\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/transformers\/configuration_utils.py:341: UserWarning: Passing 
`gradient_checkpointing` to a config initialization is deprecated and will be removed in v5 Transformers. Using `model.gradient_checkpointing_enable()` instead, or if you are using the `Trainer` API, pass `gradient_checkpointing=True` in your `TrainingArguments`.\r\n \"Passing `gradient_checkpointing` to a config initialization is deprecated and will be removed in v5 \"\r\nSpecial tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\r\n 0%| | 0\/3 [00:00\r\n test_dataset = test_dataset.map(speech_file_to_array_fn)\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 2036, in map\r\n desc=desc,\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 518, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 485, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 411, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 2368, in _map_single\r\n example = apply_function_on_filtered_inputs(example, i, offset=offset)\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 2277, in apply_function_on_filtered_inputs\r\n processed_inputs = function(*fn_args, *additional_args, **fn_kwargs)\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1978, in decorated\r\n result = f(decorated_item, *args, **kwargs)\r\n File \"demo_file.py\", line 19, in speech_file_to_array_fn\r\n speech_array, sampling_rate = torchaudio.load(batch[\"path\"])\r\n File \"\/opt\/conda\/lib\/python3.7\/site-packages\/torchaudio\/backend\/sox_io_backend.py\", line 154, in load\r\n filepath, frame_offset, num_frames, normalize, channels_first, format)\r\nRuntimeError: Error loading audio file: failed to open file common_voice_hi_23795358.mp3\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.16.1\r\n- Platform: Linux-4.14.243 with-debian-bullseye-sid\r\n- Python version: 3.7.9\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3381\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3381\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3380","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3380\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3380\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3380\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3380","id":1071166270,"node_id":"I_kwDODunzps4_2LM-","number":3380,"title":"[Quick poll] Give your opinion on the future of the Hugging Face Open Source 
ecosystem!","user":{"login":"LysandreJik","id":30755778,"node_id":"MDQ6VXNlcjMwNzU1Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30755778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LysandreJik","html_url":"https:\/\/github.com\/LysandreJik","followers_url":"https:\/\/api.github.com\/users\/LysandreJik\/followers","following_url":"https:\/\/api.github.com\/users\/LysandreJik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LysandreJik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LysandreJik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LysandreJik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LysandreJik\/orgs","repos_url":"https:\/\/api.github.com\/users\/LysandreJik\/repos","events_url":"https:\/\/api.github.com\/users\/LysandreJik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LysandreJik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-04T09:18:33Z","updated_at":"2022-01-11T12:29:53Z","closed_at":"2022-01-11T12:29:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Thanks to all of you, `datasets` will pass 11.5k stars :star2: this week!\r\n\r\nIf you have a couple of minutes and want to participate in shaping the future of the ecosystem, please share your thoughts: \r\n\r\n[**hf.co\/oss-survey**](https:\/\/hf.co\/oss-survey)\r\n\r\n(please reply in the above feedback form rather than to this thread)\r\n\r\nThank you all on behalf of the HuggingFace team! \ud83e\udd17","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3380\/reactions","total_count":5,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":3,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3380\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3379","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3379\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3379\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3379\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3379","id":1071079146,"node_id":"PR_kwDODunzps4vYr7K","number":3379,"title":"iter_archive on zipfiles with better compression type 
check","user":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-12-04T01:04:48Z","updated_at":"2022-02-11T14:49:21Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3379","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3379","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3379.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3379.patch","merged_at":null},"body":"Hello @lhoestq , thank you for your detailed answer on previous PR ! \r\nI made this new PR because I misused git on the previous one #3347.\r\nRelated issue #3272.\r\n\r\n# Comments : \r\n\r\n* For extension check I used the `_get_extraction_protocol` function in **download_manager.py** with a slight change and called it `_get_extraction_protocol_local`: \r\n\r\n**I removed this part :**\r\n ```python\r\n elif path.endswith(\".tar.gz\") or path.endswith(\".tgz\"):\r\n raise NotImplementedError(\r\n f\"Extraction protocol for TAR archives like '{urlpath}' is not implemented in streaming mode. 
Please use `dl_manager.iter_archive` instead.\"\r\n )\r\n```\r\n**And also changed :** \r\n```diff\r\n- extension = path.split(\".\")[-1]\r\n+ extension = \"tar\" if path.endswith(\".tar.gz\") else path.split(\".\")[-1]\r\n```\r\nThe reason for this is a compression like **.tar.gz** will be considered a **.gz** which is handled with **zipfile**, though **tar.gz** can only be opened using **tarfile**.\r\n\r\nPlease tell me if there's anything to change.\r\n\r\n# Tasks : \r\n- [x] download_manager.py\r\n- [x] streaming_download_manager.py ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3379\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3379\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3378","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3378\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3378\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3378\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3378","id":1070580126,"node_id":"PR_kwDODunzps4vXF1D","number":3378,"title":"Add The Pile subsets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-03T13:14:54Z","updated_at":"2021-12-09T18:11:25Z","closed_at":"2021-12-09T18:11:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3378","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3378","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3378.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3378.patch","merged_at":"2021-12-09T18:11:23Z"},"body":"Add The Pile subsets:\r\n- pubmed\r\n- ubuntu_irc\r\n- europarl\r\n- hacker_news\r\n- nih_exporter\r\n\r\nClose bigscience-workshop\/data_tooling#301.\r\n\r\nCC: 
@StellaAthena","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3378\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3378\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3377","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3377\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3377\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3377\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3377","id":1070562907,"node_id":"PR_kwDODunzps4vXCHn","number":3377,"title":"COCO \ud83e\udd65 on the \ud83e\udd17 Hub?","user":{"login":"merveenoyan","id":53175384,"node_id":"MDQ6VXNlcjUzMTc1Mzg0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53175384?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/merveenoyan","html_url":"https:\/\/github.com\/merveenoyan","followers_url":"https:\/\/api.github.com\/users\/merveenoyan\/followers","following_url":"https:\/\/api.github.com\/users\/merveenoyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/merveenoyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/merveenoyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/merveenoyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/merveenoyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/merveenoyan\/repos","events_url":"https:\/\/api.github.com\/users\/merveenoyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/merveenoyan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-12-03T12:55:27Z","updated_at":"2021-12-20T14:14:01Z","closed_at":"2021-12-20T14:14:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3377","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3377","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3377.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3377.patch","merged_at":null},"body":"This is a draft PR since I ran into few small problems. 
\r\nI referred to this TFDS code: https:\/\/github.com\/tensorflow\/datasets\/blob\/2538a08c184d53b37bfcf52cc21dd382572a88f4\/tensorflow_datasets\/object_detection\/coco.py\r\ncc: @mariosasko ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3377\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3377\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3376","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3376\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3376\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3376\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3376","id":1070522979,"node_id":"PR_kwDODunzps4vW5sB","number":3376,"title":"Update clue benchmark","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-03T12:06:01Z","updated_at":"2021-12-08T14:14:42Z","closed_at":"2021-12-08T14:14:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3376","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3376","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3376.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3376.patch","merged_at":"2021-12-08T14:14:41Z"},"body":"Fix #3374 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3376\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3376\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3375","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3375\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3375\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3375\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3375","id":1070454913,"node_id":"PR_kwDODunzps4vWrXz","number":3375,"title":"Support streaming zipped dataset repo by passing only repo name","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-12-03T10:43:05Z","updated_at":"2021-12-16T18:03:32Z","closed_at":"2021-12-16T18:03:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3375","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3375","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3375.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3375.patch","merged_at":"2021-12-16T18:03:31Z"},"body":"Proposed solution:\r\n- I have added the method `iter_files` to DownloadManager and StreamingDownloadManager\r\n- I use this in modules: \"csv\", \"json\", \"text\"\r\n- I test for CSV\/JSONL\/TXT zipped (and non-zipped) files, both in streaming and non-streaming modes\r\n\r\nFix #3373.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3375\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3375\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3374","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3374\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3374\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3374\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3374","id":1070426462,"node_id":"I_kwDODunzps4_zWle","number":3374,"title":"NonMatchingChecksumError for the CLUE:cluewsc2020, chid, c3 and tnews","user":{"login":"Namco0816","id":34687537,"node_id":"MDQ6VXNlcjM0Njg3NTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34687537?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Namco0816","html_url":"https:\/\/github.com\/Namco0816","followers_url":"https:\/\/api.github.com\/users\/Namco0816\/followers","following_url":"https:\/\/api.github.com\/users\/Namco0816\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Namco0816\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Namco0816\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Namco0816\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Namco0816\/orgs","repos_url":"https:\/\/api.github.com\/users\/Namco0816\/repos","events_url":"https:\/\/api.github.com\/users\/Namco0816\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Namco0816\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\
/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-12-03T10:10:54Z","updated_at":"2021-12-08T14:14:41Z","closed_at":"2021-12-08T14:14:41Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, it seems like there are updates in cluewsc2020, chid, c3 and tnews, since i could not load them due to the checksum error.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3374\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3374\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3373","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3373\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3373\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3373\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3373","id":1070406391,"node_id":"I_kwDODunzps4_zRr3","number":3373,"title":"Support streaming zipped CSV dataset repo by passing only repo name","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-12-03T09:48:24Z","updated_at":"2021-12-16T18:03:31Z","closed_at":"2021-12-16T18:03:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Given a community \ud83e\udd17 dataset repository containing only a zipped CSV file (only raw data, no loading script), I would like to load it in streaming mode without passing `data_files`:\r\n```\r\nds_name = \"bigscience-catalogue-data\/vietnamese_poetry_from_fsoft_ai_lab\"\r\nds = load_dataset(ds_name, split=\"train\", streaming=True, use_auth_token=True)\r\nitem = next(iter(ds))\r\n```\r\n\r\nCurrently, it gives a `FileNotFoundError` because there is no glob (no \"\\*\" after \"zip:\/\/\": \"zip:\/\/*\") in the passed URL:\r\n```\r\n'zip:\/\/::https:\/\/huggingface.co\/datasets\/bigscience-catalogue-data\/vietnamese_poetry_from_fsoft_ai_lab\/resolve\/e5d45f1bd9a8a798cc14f0a45ebc1ce91907c792\/poems_dataset.zip'\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3373\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3373\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3372","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3372\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3372\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3372\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3372","id":1069948178,"node_id":"I_kwDODunzps4_xh0S","number":3372,"title":"[SEO improvement] Add Dataset Metadata to make datasets indexable","user":{"login":"cakiki","id":3664563,"node_id":"MDQ6VXNlcjM2NjQ1NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3664563?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cakiki","html_url":"https:\/\/github.com\/cakiki","followers_url":"https:\/\/api.github.com\/users\/cakiki\/followers","following_url":"https:\/\/api.github.com\/users\/cakiki\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cakiki\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cakiki\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cakiki\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cakiki\/orgs","repos_url":"https:\/\/api.github.com\/users\/cakiki\/repos","events_url":"https:\/\/api.github.com\/users\/cakiki\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cakiki\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-02T20:21:07Z","updated_at":"2021-12-02T20:21:07Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Some people who host datasets on github seem to include a table of metadata at the end of their README.md to make the dataset indexable by [Google Dataset Search](https:\/\/datasetsearch.research.google.com\/) (See [here](https:\/\/github.com\/google-research\/google-research\/tree\/master\/goemotions#dataset-metadata) and [here](https:\/\/github.com\/cvdfoundation\/google-landmark#dataset-metadata)). 
This could be a useful addition to canonical datasets; perhaps even community datasets.\r\n\r\nI'll include a screenshot (as opposed to markdown) as an example so as not to have a github issue indexed as a dataset:\r\n\r\n> ![image](https:\/\/user-images.githubusercontent.com\/3664563\/144496173-953428cf-633a-4571-b75b-f099c6b2ed65.png)\r\n\r\n\r\n\r\n**_PS: It might very well be the case that this is already covered by some other markdown magic I'm not aware of._**\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3372\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3372\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3371","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3371\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3371\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3371\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3371","id":1069821335,"node_id":"PR_kwDODunzps4vUnbp","number":3371,"title":"New: Americas NLI dataset","user":{"login":"fdschmidt93","id":39233597,"node_id":"MDQ6VXNlcjM5MjMzNTk3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39233597?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/fdschmidt93","html_url":"https:\/\/github.com\/fdschmidt93","followers_url":"https:\/\/api.github.com\/users\/fdschmidt93\/followers","following_url":"https:\/\/api.github.com\/users\/fdschmidt93\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/fdschmidt93\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/fdschmidt93\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/fdschmidt93\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/fdschmidt93\/orgs","repos_url":"https:\/\/api.github.com\/users\/fdschmidt93\/repos","events_url":"https:\/\/api.github.com\/users\/fdschmidt93\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/fdschmidt93\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-02T17:44:59Z","updated_at":"2021-12-08T13:58:12Z","closed_at":"2021-12-08T13:58:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3371","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3371","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3371.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3371.patch","merged_at":"2021-12-08T13:58:11Z"},"body":"This PR adds the [Americas NLI](https:\/\/arxiv.org\/abs\/2104.08726) dataset, extension of XNLI to 10 low-resource indigenous languages spoken in the Americas: Ashaninka, Aymara, Bribri, Guarani, Nahuatl, Otomi, Quechua, Raramuri, Shipibo-Konibo, and Wixarika.\r\n\r\nOne odd thing (not sure) is that I had to set\r\n\r\n`datasets-cli dummy_data .\/datasets\/americas_nli\/ --auto_generate --n_lines 7500`\r\n\r\n`n_lines` very large to successfully generate the dummy files for all the 
subsets. Happy to get some guidance here.\r\n\r\nOtherwise, I hope everything is in order :)\r\n\r\ne: missed a step, onto fixing the tests\r\ne2: there you go -- hope it's ok to have added more languages with their ISO codes to `languages.json`, need those tests to pass :laughing: ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3371\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3371\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3370","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3370\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3370\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3370\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3370","id":1069735423,"node_id":"PR_kwDODunzps4vUVA3","number":3370,"title":"Document a training loop for streaming dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-02T16:17:00Z","updated_at":"2021-12-03T13:34:35Z","closed_at":"2021-12-03T13:34:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3370","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3370","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3370.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3370.patch","merged_at":"2021-12-03T13:34:34Z"},"body":"I added some docs about streaming dataset. 
In particular I added two subsections:\r\n- one on how to use `map` for preprocessing\r\n- one on how to use a streaming dataset in a pytorch training loop\r\n\r\ncc @patrickvonplaten @stevhliu if you have some comments\r\ncc @Rocketknight1 later we can add the one for TF and I might need your help ^^'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3370\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3370\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3369","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3369\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3369\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3369\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3369","id":1069587674,"node_id":"I_kwDODunzps4_wJza","number":3369,"title":"[Audio] Allow resampling for audio datasets in streaming mode","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-12-02T14:04:57Z","updated_at":"2021-12-16T15:55:19Z","closed_at":"2021-12-16T15:55:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Many audio datasets like Common Voice always need to be resampled. 
This can very easily be done in non-streaming mode as follows:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\"common_voice\", \"ab\", split=\"test\")\r\n\r\nds = ds.cast_column(\"audio\", Audio(sampling_rate=16_000))\r\n```\r\n\r\nHowever in streaming mode it fails currently:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\"common_voice\", \"ab\", split=\"test\", streaming=True)\r\n\r\nds = ds.cast_column(\"audio\", Audio(sampling_rate=16_000))\r\n```\r\n\r\nwith the following error:\r\n\r\n```\r\nAttributeError: 'IterableDataset' object has no attribute 'cast_column' \r\n```\r\n\r\nIt would be great if we could add such a feature (I'm not 100% sure though how complex this would be)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3369\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3369\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3368","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3368\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3368\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3368\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3368","id":1069403624,"node_id":"PR_kwDODunzps4vTObo","number":3368,"title":"Fix dict source_datasets tagset validator","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-02T10:52:20Z","updated_at":"2021-12-02T15:48:38Z","closed_at":"2021-12-02T15:48:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3368","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3368","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3368.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3368.patch","merged_at":"2021-12-02T15:48:37Z"},"body":"Currently, the `source_datasets` tag validation does not support passing a dict with configuration keys.\r\n\r\nThis PR:\r\n- Extends `tagset_validator` 
to support regex tags\r\n- Uses `tagset_validator` to validate dict `source_datasets`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3368\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3368\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3367","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3367\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3367\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3367\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3367","id":1069241274,"node_id":"PR_kwDODunzps4vSsfk","number":3367,"title":"Fix typo in other-structured-to-text task tag","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-02T08:02:27Z","updated_at":"2021-12-02T16:07:14Z","closed_at":"2021-12-02T16:07:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3367","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3367","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3367.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3367.patch","merged_at":"2021-12-02T16:07:13Z"},"body":"Fix typo in task tag: \r\n- `other-stuctured-to-text` (before)\r\n- `other-structured-to-text` (now)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3367\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3367\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3366","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3366\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3366\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3366\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3366","id":1069214022,"node_id":"I_kwDODunzps4_uulG","number":3366,"title":"Add multimodal datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-12-02T07:24:04Z","updated_at":"2021-12-02T07:33:33Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Epic issue to track the addition of multimodal datasets:\r\n- [ ] #2526\r\n- [ ] #1842\r\n- [ ] #1810\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\n@VictorSanh feel free to add and sort by priority any interesting dataset. 
I have added the multimodal dataset requests which were already present as issues.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3366\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3366\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3365","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3365\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3365\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3365\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3365","id":1069195887,"node_id":"I_kwDODunzps4_uqJv","number":3365,"title":"Add task tags for multimodal datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-02T06:58:20Z","updated_at":"2021-12-02T07:33:09Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## **Is your feature request related to a problem? 
Please describe.**\r\n\r\nCurrently, task tags are either exclusively related to text or speech processing:\r\n- https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/utils\/resources\/tasks.json\r\n\r\n## **Describe the solution you'd like**\r\n\r\nWe should also add tasks related to:\r\n- multimodality\r\n- image\r\n- video\r\n\r\nCC: @VictorSanh @lewtun @lhoestq @merveenoyan @SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3365\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3365\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3364","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3364\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3364\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3364\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3364","id":1068851196,"node_id":"PR_kwDODunzps4vRaxq","number":3364,"title":"Use the Audio feature in the AutomaticSpeechRecognition template","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-12-01T20:42:26Z","updated_at":"2022-01-31T16:00:57Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3364","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3364","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3364.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3364.patch","merged_at":null},"body":"This updates the ASR template and all supported datasets to use the `Audio` feature","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3364\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3364\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3363","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3363\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3363\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3363\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3363","id":1068824340,"node_id":"PR_kwDODunzps4vRVCl","number":3363,"title":"Update URL of Jeopardy! dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-01T20:08:10Z","updated_at":"2021-12-03T12:35:01Z","closed_at":"2021-12-03T12:35:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3363","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3363","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3363.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3363.patch","merged_at":null},"body":"Updates the URL of the Jeopardy! 
dataset.\r\n\r\nFix #3361","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3363\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3363\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3362","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3362\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3362\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3362\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3362","id":1068809768,"node_id":"PR_kwDODunzps4vRR2r","number":3362,"title":"Adapt image datasets","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-12-01T19:52:01Z","updated_at":"2021-12-09T18:37:42Z","closed_at":"2021-12-09T18:37:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3362","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3362","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3362.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3362.patch","merged_at":"2021-12-09T18:37:41Z"},"body":"This PR:\r\n* adapts the ImageClassification template to use the new Image feature\r\n* adapts the following datasets to use the new Image feature:\r\n * beans (+ fixes streaming)\r\n * cast_vs_dogs (+ fixes streaming)\r\n * cifar10\r\n * cifar100\r\n * fashion_mnist\r\n * mnist\r\n * head_qa\r\n\r\ncc @nateraw","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3362\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3362\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3361","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3361\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3361\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3361\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3361","id":1068736268,"node_id":"I_kwDODunzps4_s58M","number":3361,"title":"Jeopardy _URL access denied","user":{"login":"tianjianjiang","id":4812544,"node_id":"MDQ6VXNlcjQ4MTI1NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4812544?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tianjianjiang","html_url":"https:\/\/github.com\/tianjianjiang","followers_url":"https:\/\/api.github.com\/users\/tianjianjiang\/followers","following_url":"https:\/\/api.github.com\/users\/tianjianjiang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tianjianjiang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tianjianjiang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tianjianjiang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tianjianjiang\/orgs","repos_url":"https:\/\/api.github.com\/users\/tianjianjiang\/repos","events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-01T18:21:33Z","updated_at":"2021-12-11T12:50:23Z","closed_at":"2021-12-06T11:16:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nhttp:\/\/skeeto.s3.amazonaws.com\/share\/JEOPARDY_QUESTIONS1.json.gz returns Access Denied now.\r\n\r\nHowever, https:\/\/drive.google.com\/file\/d\/0BwT5wj_P7BKXb2hfM3d2RHU1ckE\/view?usp=sharing from the original Reddit post https:\/\/www.reddit.com\/r\/datasets\/comments\/1uyd0t\/200000_jeopardy_questions_in_a_json_file\/ may work.\r\n\r\n\r\n## Steps to reproduce the bug\r\n```shell\r\n> python\r\nPython 3.7.12 (default, Sep 5 2021, 08:34:29)\r\n[Clang 11.0.3 (clang-1103.0.32.62)] on darwin\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n```\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> load_dataset(\"jeopardy\")\r\n```\r\n\r\n## Expected results\r\nThe download completes.\r\n\r\n## Actual results\r\n```shell\r\nDownloading: 4.18kB [00:00, 1.60MB\/s]\r\nDownloading: 2.03kB [00:00, 1.04MB\/s]\r\nUsing custom data configuration default\r\nDownloading and preparing dataset jeopardy\/default (download: 12.13 MiB, generated: 34.46 MiB, post-processed: Unknown size, total: 46.59 MiB) to \/Users\/mike\/.cache\/huggingface\/datasets\/jeopardy\/default\/0.1.0\/25ee3e4a73755e637b8810f6493fd36e4523dea3ca8a540529d0a6e24c7f9810...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File 
\"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 608, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/Users\/mike\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/jeopardy\/25ee3e4a73755e637b8810f6493fd36e4523dea3ca8a540529d0a6e24c7f9810\/jeopardy.py\", line 72, in _split_generators\r\n filepath = dl_manager.download_and_extract(_DATA_URL)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 197, in download\r\n download_func, url_or_urls, map_tuple=True, num_proc=download_config.num_proc, disable_tqdm=False\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 197, in map_nested\r\n return function(data_struct)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 305, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 594, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/skeeto.s3.amazonaws.com\/share\/JEOPARDY_QUESTIONS1.json.gz\r\n```\r\n\r\n---\r\n\r\n```shell\r\n> curl http:\/\/skeeto.s3.amazonaws.com\/share\/JEOPARDY_QUESTIONS1.json.gz\r\n```\r\n```xml\r\n\r\nAccessDenied<\/Code>Access Denied<\/Message>70Y9R36XNPEQXMGV<\/RequestId>G6F5AK4qo7JdaEdKGMtS0P6gdLPeFOdEfSEfvTOZEfk9km0\/jAfp08QLfKSTFFj1oWIKoAoBehM=<\/HostId><\/Error>\r\n```\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.0\r\n- Platform: macOS Catalina 10.15.7\r\n- Python version: 3.7.12\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3361\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3361\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3360","id":1068724697,"node_id":"PR_kwDODunzps4vQ_16","number":3360,"title":"Add The Pile USPTO subset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-01T18:08:05Z","updated_at":"2021-12-03T11:45:29Z","closed_at":"2021-12-03T11:45:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3360","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3360","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3360.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3360.patch","merged_at":"2021-12-03T11:45:27Z"},"body":"Add:\r\n- USPTO subset of The Pile: \"uspto\" config\r\n\r\nClose bigscience-workshop\/data_tooling#297.\r\n\r\nCC: @StellaAthena","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3360\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3359","id":1068638213,"node_id":"PR_kwDODunzps4vQtI0","number":3359,"title":"Add The Pile Free Law 
subset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-12-01T16:46:04Z","updated_at":"2021-12-06T10:12:17Z","closed_at":"2021-12-01T17:30:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3359","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3359","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3359.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3359.patch","merged_at":"2021-12-01T17:30:43Z"},"body":"Add:\r\n- Free Law subset of The Pile: \"free_law\" config\r\n\r\nClose bigscience-workshop\/data_tooling#75.\r\n\r\nCC: @StellaAthena","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3359\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3358","id":1068623216,"node_id":"I_kwDODunzps4_seVw","number":3358,"title":"add new field, and get 
errors","user":{"login":"yanllearnn","id":38966558,"node_id":"MDQ6VXNlcjM4OTY2NTU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38966558?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yanllearnn","html_url":"https:\/\/github.com\/yanllearnn","followers_url":"https:\/\/api.github.com\/users\/yanllearnn\/followers","following_url":"https:\/\/api.github.com\/users\/yanllearnn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yanllearnn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yanllearnn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yanllearnn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yanllearnn\/orgs","repos_url":"https:\/\/api.github.com\/users\/yanllearnn\/repos","events_url":"https:\/\/api.github.com\/users\/yanllearnn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yanllearnn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-12-01T16:35:38Z","updated_at":"2021-12-02T02:26:22Z","closed_at":"2021-12-02T02:26:22Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"after adding new field **tokenized_examples[\"example_id\"]**, and get errors below,\r\nI think it is due to changing data to tensor, and **tokenized_examples[\"example_id\"]** is string list \r\n**all fields**\r\n```\r\n***************** train_dataset 1: Dataset({\r\n features: ['attention_mask', 'end_positions', 'example_id', 'input_ids', 'start_positions', 'token_type_ids'],\r\n num_rows: 87714\r\n})\r\n```\r\n\r\n**Errors**\r\n```\r\nTraceback (most recent call last):\r\n File \"\/usr\/local\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 705, in convert_to_tensors\r\n tensor = as_tensor(value)\r\nValueError: too many dimensions 'str'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3358\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3357","id":1068607382,"node_id":"PR_kwDODunzps4vQmcL","number":3357,"title":"Update 
README.md","user":{"login":"apergo-ai","id":68908804,"node_id":"MDQ6VXNlcjY4OTA4ODA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68908804?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/apergo-ai","html_url":"https:\/\/github.com\/apergo-ai","followers_url":"https:\/\/api.github.com\/users\/apergo-ai\/followers","following_url":"https:\/\/api.github.com\/users\/apergo-ai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/apergo-ai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/apergo-ai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/apergo-ai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/apergo-ai\/orgs","repos_url":"https:\/\/api.github.com\/users\/apergo-ai\/repos","events_url":"https:\/\/api.github.com\/users\/apergo-ai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/apergo-ai\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-01T16:20:46Z","updated_at":"2021-12-01T16:20:46Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3357","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3357","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3357.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3357.patch","merged_at":null},"body":"After having worked a bit with the dataset.\r\nAs far as I know, it is solely in English (en-US). There are only a few mails in Spanish, French or German (less than a dozen I would estimate).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3357\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3356","id":1068503932,"node_id":"PR_kwDODunzps4vQQLD","number":3356,"title":"to_tf_dataset() 
refactor","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-12-01T14:54:30Z","updated_at":"2021-12-09T10:26:53Z","closed_at":"2021-12-09T10:26:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3356","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3356","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3356.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3356.patch","merged_at":"2021-12-09T10:26:53Z"},"body":"This is the promised cleanup to `to_tf_dataset()` now that the course is out of the way! The main changes are:\r\n\r\n- A collator is always required (there was way too much hackiness making things like labels work without it)\r\n- Lots of cleanup and a lot of code moved to `_get_output_signature`\r\n- Should now handle it gracefully when the data collator adds unexpected columns","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":3,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3356\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3355","id":1068468573,"node_id":"PR_kwDODunzps4vQIoy","number":3355,"title":"Extend support for streaming datasets that use 
pd.read_excel","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-12-01T14:22:43Z","updated_at":"2021-12-17T07:24:19Z","closed_at":"2021-12-17T07:24:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3355","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3355","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3355.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3355.patch","merged_at":"2021-12-17T07:24:18Z"},"body":"This PR fixes error:\r\n```\r\nValueError: Cannot seek streaming HTTP file\r\n```\r\n\r\nCC: @severo ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3355\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3354","id":1068307271,"node_id":"PR_kwDODunzps4vPl9d","number":3354,"title":"Remove duplicate name from dataset 
cards","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-01T11:45:40Z","updated_at":"2021-12-01T13:14:30Z","closed_at":"2021-12-01T13:14:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3354","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3354","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3354.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3354.patch","merged_at":"2021-12-01T13:14:29Z"},"body":"Remove duplicate name from dataset card for:\r\n- ajgt_twitter_ar\r\n- emotone_ar","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3354\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3353","id":1068173783,"node_id":"I_kwDODunzps4_qwnX","number":3353,"title":" add one field \"example_id\", but I can't see it in the \"comput_loss\" 
function","user":{"login":"yanllearnn","id":38966558,"node_id":"MDQ6VXNlcjM4OTY2NTU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38966558?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yanllearnn","html_url":"https:\/\/github.com\/yanllearnn","followers_url":"https:\/\/api.github.com\/users\/yanllearnn\/followers","following_url":"https:\/\/api.github.com\/users\/yanllearnn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yanllearnn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yanllearnn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yanllearnn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yanllearnn\/orgs","repos_url":"https:\/\/api.github.com\/users\/yanllearnn\/repos","events_url":"https:\/\/api.github.com\/users\/yanllearnn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yanllearnn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-12-01T09:35:09Z","updated_at":"2021-12-01T16:02:39Z","closed_at":"2021-12-01T16:02:39Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I add one field **example_id**, but I can't see it in the **comput_loss** function, how can I do this? below is the information of inputs\r\n\r\n```\r\n*********************** inputs: {'attention_mask': tensor([[1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0],\r\n ...,\r\n [1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0],\r\n [1, 1, 1, ..., 0, 0, 0]], device='cuda:0'), 'end_positions': tensor([ 25, 97, 93, 44, 25, 112, 109, 134], device='cuda:0'), 'input_ids': tensor([[ 101, 2054, 2390, ..., 0, 0, 0],\r\n [ 101, 2054, 2515, ..., 0, 0, 0],\r\n [ 101, 2054, 2106, ..., 0, 0, 0],\r\n ...,\r\n [ 101, 2339, 2001, ..., 0, 0, 0],\r\n [ 101, 2054, 2515, ..., 0, 0, 0],\r\n [ 101, 2054, 2003, ..., 0, 0, 0]], device='cuda:0'), 'start_positions': tensor([ 20, 90, 89, 41, 25, 96, 106, 132], device='cuda:0'), 'token_type_ids': tensor([[0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0],\r\n ...,\r\n [0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0],\r\n [0, 0, 0, ..., 0, 0, 0]], device='cuda:0')} \r\n```\r\n\r\n```\r\n# This function preprocesses a question answering dataset, tokenizing the question and context text\r\n# and finding the right offsets for the answer spans in the tokenized context (to use as labels).\r\n# Adapted from https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/pytorch\/question-answering\/run_qa.py\r\ndef prepare_train_dataset_qa(examples, tokenizer, max_seq_length=None):\r\n questions = [q.lstrip() for q in examples[\"question\"]]\r\n max_seq_length = tokenizer.model_max_length\r\n # tokenize both questions and the corresponding context\r\n # if the context length is longer than max_length, we split it to several\r\n # chunks of max_length\r\n tokenized_examples = tokenizer(\r\n questions,\r\n examples[\"context\"],\r\n truncation=\"only_second\",\r\n max_length=max_seq_length,\r\n stride=min(max_seq_length \/\/ 2, 128),\r\n return_overflowing_tokens=True,\r\n return_offsets_mapping=True,\r\n padding=\"max_length\"\r\n )\r\n\r\n # Since one example might give us several features if it has a long context,\r\n # we need a map from a feature to its corresponding example.\r\n sample_mapping = 
tokenized_examples.pop(\"overflow_to_sample_mapping\")\r\n # The offset mappings will give us a map from token to character position\r\n # in the original context. This will help us compute the start_positions\r\n # and end_positions to get the final answer string.\r\n offset_mapping = tokenized_examples.pop(\"offset_mapping\")\r\n\r\n tokenized_examples[\"start_positions\"] = []\r\n tokenized_examples[\"end_positions\"] = []\r\n\r\n tokenized_examples[\"example_id\"] = []\r\n\r\n for i, offsets in enumerate(offset_mapping):\r\n input_ids = tokenized_examples[\"input_ids\"][i]\r\n # We will label features not containing the answer the index of the CLS token.\r\n cls_index = input_ids.index(tokenizer.cls_token_id)\r\n sequence_ids = tokenized_examples.sequence_ids(i)\r\n # from the feature idx to sample idx\r\n sample_index = sample_mapping[i]\r\n # get the answer for a feature\r\n answers = examples[\"answers\"][sample_index]\r\n\r\n tokenized_examples[\"example_id\"].append(examples[\"id\"][sample_index])\r\n\r\n if len(answers[\"answer_start\"]) == 0:\r\n tokenized_examples[\"start_positions\"].append(cls_index)\r\n tokenized_examples[\"end_positions\"].append(cls_index)\r\n else:\r\n # Start\/end character index of the answer in the text.\r\n start_char = answers[\"answer_start\"][0]\r\n end_char = start_char + len(answers[\"text\"][0])\r\n\r\n # Start token index of the current span in the text.\r\n token_start_index = 0\r\n while sequence_ids[token_start_index] != 1:\r\n token_start_index += 1\r\n\r\n # End token index of the current span in the text.\r\n token_end_index = len(input_ids) - 1\r\n while sequence_ids[token_end_index] != 1:\r\n token_end_index -= 1\r\n\r\n # Detect if the answer is out of the span (in which case this feature is labeled with the CLS index).\r\n if not (offsets[token_start_index][0] <= start_char and\r\n offsets[token_end_index][1] >= end_char):\r\n tokenized_examples[\"start_positions\"].append(cls_index)\r\n tokenized_examples[\"end_positions\"].append(cls_index)\r\n else:\r\n # Otherwise move the token_start_index and token_end_index to the two ends of the answer.\r\n # Note: we could go after the last offset if the answer is the last word (edge case).\r\n while token_start_index < len(offsets) and \\\r\n offsets[token_start_index][0] <= start_char:\r\n token_start_index += 1\r\n tokenized_examples[\"start_positions\"].append(\r\n token_start_index - 1)\r\n while offsets[token_end_index][1] >= end_char:\r\n token_end_index -= 1\r\n tokenized_examples[\"end_positions\"].append(token_end_index + 1)\r\n\r\n return tokenized_examples\r\n```\r\n\r\n_Originally posted by @yanllearnn in https:\/\/github.com\/huggingface\/datasets\/issues\/3333#issuecomment-983457161_","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3353\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3352","id":1068102994,"node_id":"PR_kwDODunzps4vO6uZ","number":3352,"title":"Make LABR dataset streamable","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-01T08:22:27Z","updated_at":"2021-12-01T10:49:02Z","closed_at":"2021-12-01T10:49:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3352","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3352","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3352.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3352.patch","merged_at":"2021-12-01T10:49:01Z"},"body":"Fix LABR dataset to make it streamable.\r\n\r\nRelated to: #3350.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3352\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3351","id":1068094873,"node_id":"PR_kwDODunzps4vO5AS","number":3351,"title":"Add VCTK 
dataset","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-12-01T08:13:17Z","updated_at":"2021-12-28T15:12:00Z","closed_at":"2021-12-28T15:05:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3351","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3351","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3351.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3351.patch","merged_at":"2021-12-28T15:05:07Z"},"body":"Fixes #1837. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3351\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3350","id":1068078160,"node_id":"PR_kwDODunzps4vO1aj","number":3350,"title":"Avoid content-encoding issue while streaming 
datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-12-01T07:56:48Z","updated_at":"2021-12-01T08:15:01Z","closed_at":"2021-12-01T08:15:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3350","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3350","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3350.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3350.patch","merged_at":"2021-12-01T08:15:00Z"},"body":"This PR will fix streaming of datasets served with gzip content-encoding:\r\n```\r\nClientPayloadError: 400, message='Can not decode content-encoding: gzip'\r\n```\r\n\r\nFix #2918.\r\n\r\nCC: @severo ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3350\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3349","id":1067853601,"node_id":"PR_kwDODunzps4vOF-s","number":3349,"title":"raise exception instead of using 
assertions.","user":{"login":"manisnesan","id":153142,"node_id":"MDQ6VXNlcjE1MzE0Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/153142?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manisnesan","html_url":"https:\/\/github.com\/manisnesan","followers_url":"https:\/\/api.github.com\/users\/manisnesan\/followers","following_url":"https:\/\/api.github.com\/users\/manisnesan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manisnesan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manisnesan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manisnesan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manisnesan\/orgs","repos_url":"https:\/\/api.github.com\/users\/manisnesan\/repos","events_url":"https:\/\/api.github.com\/users\/manisnesan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manisnesan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-12-01T01:37:51Z","updated_at":"2021-12-20T16:07:27Z","closed_at":"2021-12-20T16:07:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3349","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3349","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3349.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3349.patch","merged_at":"2021-12-20T16:07:27Z"},"body":"fix for the remaining files https:\/\/github.com\/huggingface\/datasets\/issues\/3171","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3349\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3348","id":1067831113,"node_id":"PR_kwDODunzps4vOBOQ","number":3348,"title":"BLEURT: Match key names to correspond with 
filename","user":{"login":"jaehlee","id":11873078,"node_id":"MDQ6VXNlcjExODczMDc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11873078?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaehlee","html_url":"https:\/\/github.com\/jaehlee","followers_url":"https:\/\/api.github.com\/users\/jaehlee\/followers","following_url":"https:\/\/api.github.com\/users\/jaehlee\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaehlee\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaehlee\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaehlee\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaehlee\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaehlee\/repos","events_url":"https:\/\/api.github.com\/users\/jaehlee\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaehlee\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-12-01T01:01:18Z","updated_at":"2021-12-07T16:06:57Z","closed_at":"2021-12-07T16:06:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3348","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3348","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3348.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3348.patch","merged_at":"2021-12-07T16:06:57Z"},"body":"In order to properly locate downloaded ckpt files key name needs to match filename. Correcting change introduced in #3235 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3348\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3347","id":1067738902,"node_id":"PR_kwDODunzps4vNthw","number":3347,"title":"iter_archive for zip files 
","user":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-30T22:34:17Z","updated_at":"2021-12-04T00:22:22Z","closed_at":"2021-12-04T00:22:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3347","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3347","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3347.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3347.patch","merged_at":null},"body":"* In this PR, I added the option to iterate through zipfiles for `download_manager.py` only. \r\n* Next PR will be the same applied to `streaming_download_manager.py`.\r\n* Related issue #3272.\r\n## Comments : \r\n* There is no `.isreg()` equivalent in zipfile library to check if file is Regular so I used `.is_dir()` instead to skip directories.\r\n* For now I got `streaming_download_manager.py` working for local zip files, but not for urls. I get the following error when I test it on an archive in google drive, so still working on it. 
`BlockSizeError: Got more bytes so far (>2112) than requested (22)`\r\n\r\n\r\n## Tasks : \r\n- [x] download_manager.py\r\n- [ ] streaming_download_manager.py","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3347\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3346","id":1067632365,"node_id":"I_kwDODunzps4_osbt","number":3346,"title":"Failed to convert `string` with pyarrow for QED since 1.15.0","user":{"login":"tianjianjiang","id":4812544,"node_id":"MDQ6VXNlcjQ4MTI1NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4812544?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tianjianjiang","html_url":"https:\/\/github.com\/tianjianjiang","followers_url":"https:\/\/api.github.com\/users\/tianjianjiang\/followers","following_url":"https:\/\/api.github.com\/users\/tianjianjiang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tianjianjiang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tianjianjiang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tianjianjiang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tianjianjiang\/orgs","repos_url":"https:\/\/api.github.com\/users\/tianjianjiang\/repos","events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-11-30T20:11:42Z","updated_at":"2021-12-14T14:39:05Z","closed_at":"2021-12-14T14:39:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nLoading QED was fine until 1.15.0.\r\nrelated: bigscience-workshop\/promptsource#659, bigscience-workshop\/promptsource#670\r\n\r\nNot sure where the root cause is, but here are some candidates:\r\n- #3158\r\n- #3120\r\n- #3196\r\n- #2891\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset(\"qed\")\r\n```\r\n\r\n## Expected results\r\nLoading completed.\r\n\r\n## Actual results\r\n```shell\r\nArrowInvalid: Could not convert in with type str: tried to convert to boolean\r\nTraceback:\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/streamlit\/script_runner.py\", line 354, in _run_script\r\n exec(code, module.__dict__)\r\nFile \"\/Users\/s0s0cr3\/Documents\/GitHub\/promptsource\/promptsource\/app.py\", line 260, in \r\n dataset = get_dataset(dataset_key, str(conf_option.name) if conf_option else None)\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/streamlit\/caching.py\", line 543, in wrapped_func\r\n return get_or_create_cached_value()\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/streamlit\/caching.py\", line 527, in get_or_create_cached_value\r\n return_value = func(*args, **kwargs)\r\nFile 
\"\/Users\/s0s0cr3\/Documents\/GitHub\/promptsource\/promptsource\/utils.py\", line 49, in get_dataset\r\n builder_instance.download_and_prepare()\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/builder.py\", line 697, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/builder.py\", line 1106, in _prepare_split\r\n num_examples, num_bytes = writer.finalize()\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/arrow_writer.py\", line 456, in finalize\r\n self.write_examples_on_file()\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/arrow_writer.py\", line 325, in write_examples_on_file\r\n pa_array = pa.array(typed_sequence)\r\nFile \"pyarrow\/array.pxi\", line 222, in pyarrow.lib.array\r\nFile \"pyarrow\/array.pxi\", line 110, in pyarrow.lib._handle_arrow_array_protocol\r\nFile \"\/Users\/s0s0cr3\/Library\/Python\/3.9\/lib\/python\/site-packages\/datasets\/arrow_writer.py\", line 121, in __arrow_array__\r\n out = pa.array(cast_to_python_objects(self.data, only_1d_for_numpy=True), type=type)\r\nFile \"pyarrow\/array.pxi\", line 305, in pyarrow.lib.array\r\nFile \"pyarrow\/array.pxi\", line 39, in pyarrow.lib._sequence_to_array\r\nFile \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\nFile \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.0, 1.16.1\r\n- Platform: macOS 1.15.7 or above\r\n- Python version: 3.7.12 and 3.9\r\n- PyArrow version: 3.0.0, 5.0.0, 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3346\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3345","id":1067622951,"node_id":"I_kwDODunzps4_oqIn","number":3345,"title":"Failed to download species_800 from Google Drive zip 
file","user":{"login":"tianjianjiang","id":4812544,"node_id":"MDQ6VXNlcjQ4MTI1NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4812544?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tianjianjiang","html_url":"https:\/\/github.com\/tianjianjiang","followers_url":"https:\/\/api.github.com\/users\/tianjianjiang\/followers","following_url":"https:\/\/api.github.com\/users\/tianjianjiang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tianjianjiang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tianjianjiang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tianjianjiang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tianjianjiang\/orgs","repos_url":"https:\/\/api.github.com\/users\/tianjianjiang\/repos","events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-11-30T20:00:28Z","updated_at":"2021-12-01T17:53:15Z","closed_at":"2021-12-01T17:53:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nOne can manually download the zip file on Google Drive, but `load_dataset()` cannot.\r\nrelated: #3248\r\n\r\n## Steps to reproduce the bug\r\n```shell\r\n> python\r\nPython 3.7.12 (default, Sep 5 2021, 08:34:29)\r\n[Clang 11.0.3 (clang-1103.0.32.62)] on darwin\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n```\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> s800 = load_dataset(\"species_800\")\r\n```\r\n\r\n## Expected results\r\nspecies_800 downloaded.\r\n\r\n## Actual results\r\n```shell\r\nDownloading: 5.68kB [00:00, 1.22MB\/s]\r\nDownloading: 2.70kB [00:00, 691kB\/s]\r\nDownloading and preparing dataset species800\/species_800 (download: 17.36 MiB, generated: 3.53 MiB, post-processed: Unknown size, total: 20.89 MiB) to \/Users\/mike\/.cache\/huggingface\/datasets\/species800\/species_800\/1.0.0\/532167f0bb8fbc0d77d6d03c4fd642c8c55527b9c5f2b1da77f3d00b0e559976...\r\n 0%| | 0\/1 [00:00\", line 1, in \r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 608, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/Users\/mike\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/species_800\/532167f0bb8fbc0d77d6d03c4fd642c8c55527b9c5f2b1da77f3d00b0e559976\/species_800.py\", line 104, in _split_generators\r\n downloaded_files = 
dl_manager.download_and_extract(urls_to_download)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 197, in download\r\n download_func, url_or_urls, map_tuple=True, num_proc=download_config.num_proc, disable_tqdm=False\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 209, in map_nested\r\n for obj in utils.tqdm(iterable, disable=disable_tqdm)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 209, in \r\n for obj in utils.tqdm(iterable, disable=disable_tqdm)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 143, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 305, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"\/Users\/mike\/Library\/Caches\/pypoetry\/virtualenvs\/promptsource-hsdAcWsQ-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 594, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/drive.google.com\/u\/0\/uc?id=1OletxmPYNkz2ltOr9pyT0b0iBtUWxslh&export=download\/\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14,0 1.15.0, 1.16.1\r\n- Platform: macOS Catalina 10.15.7\r\n- Python version: 3.7.12\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3345\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3344","id":1067567603,"node_id":"PR_kwDODunzps4vNJwd","number":3344,"title":"Add ArrayXD 
docs","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-30T18:53:31Z","updated_at":"2021-12-01T20:16:03Z","closed_at":"2021-12-01T19:35:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3344","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3344","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3344.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3344.patch","merged_at":"2021-12-01T19:35:32Z"},"body":"Documents support for dynamic first dimension in `ArrayXD` from #2891, and explain the `ArrayXD` feature in general. \r\n\r\nLet me know if I'm missing anything @lhoestq :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3344\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3343","id":1067505507,"node_id":"PR_kwDODunzps4vM8yB","number":3343,"title":"Better error message when download 
fails","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-30T17:38:50Z","updated_at":"2021-12-01T11:27:59Z","closed_at":"2021-12-01T11:27:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3343","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3343","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3343.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3343.patch","merged_at":"2021-12-01T11:27:58Z"},"body":"From our discussions in https:\/\/github.com\/huggingface\/datasets\/issues\/3269 and https:\/\/github.com\/huggingface\/datasets\/issues\/3282 it would be nice to have better messages if a download fails.\r\n\r\nIn particular the error now shows:\r\n- the error from the HEAD request if there's one\r\n- otherwise the response code of the HEAD request\r\n\r\nI also added an error to tell users to pass `use_auth_token` when the Hugging Face Hub returns 401 (Unauthorized).\r\n\r\nWhile paying around with this I also fixed a minor issue with the `force_download` parameter that was not always taken into account","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3343\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3342","id":1067481390,"node_id":"PR_kwDODunzps4vM3wh","number":3342,"title":"Fix ASSET dataset data 
URLs","user":{"login":"tianjianjiang","id":4812544,"node_id":"MDQ6VXNlcjQ4MTI1NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4812544?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tianjianjiang","html_url":"https:\/\/github.com\/tianjianjiang","followers_url":"https:\/\/api.github.com\/users\/tianjianjiang\/followers","following_url":"https:\/\/api.github.com\/users\/tianjianjiang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tianjianjiang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tianjianjiang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tianjianjiang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tianjianjiang\/orgs","repos_url":"https:\/\/api.github.com\/users\/tianjianjiang\/repos","events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tianjianjiang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-30T17:13:30Z","updated_at":"2021-12-14T14:50:00Z","closed_at":"2021-12-14T14:50:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3342","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3342","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3342.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3342.patch","merged_at":"2021-12-14T14:50:00Z"},"body":"Change the branch name \"master\" to \"main\" in the data URLs, since facebookresearch has changed that.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3342\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3341","id":1067449569,"node_id":"I_kwDODunzps4_n_zh","number":3341,"title":"Mirror the canonical datasets to the Hugging Face 
Hub","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-11-30T16:42:05Z","updated_at":"2022-01-26T14:47:37Z","closed_at":"2022-01-26T14:47:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"- [ ] create a repo on https:\/\/hf.co\/datasets for every canonical dataset\r\n- [ ] on every commit related to a dataset, update the hf.co 
repo\r\n\r\nSee https:\/\/github.com\/huggingface\/moon-landing\/pull\/1562\r\n\r\n@SBrandeis: I let you edit this description if needed to precise the intent.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3341\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3340","id":1067292636,"node_id":"PR_kwDODunzps4vMP6Z","number":3340,"title":"Fix JSON ClassLabel casting for integers","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-30T14:19:54Z","updated_at":"2021-12-01T11:27:30Z","closed_at":"2021-12-01T11:27:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3340","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3340","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3340.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3340.patch","merged_at":"2021-12-01T11:27:30Z"},"body":"Loading a JSON dataset with ClassLabel feature types currently fails if the JSON data already has integers. 
Indeed currently it tries to convert the strings to integers without even checking if the data are not integers already.\r\n\r\nFor example this currently fails:\r\n```python\r\nfrom datasets import load_dataset, Features, ClassLabel\r\n\r\npath = \"data.json\"\r\nf = Features({\"a\": ClassLabel(names=[\"neg\", \"pos\"])})\r\nd = load_dataset(\"json\", data_files=path, features=f)\r\n```\r\ndata.json\r\n```json\r\n{\"a\": 0}\r\n{\"a\": 1}\r\n```\r\n\r\nI fixed that by adding a line that checks the type of the JSON data before trying to convert them\r\n\r\ncc @albertvillanova let me know if it sounds good to you","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3340\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3339","id":1066662477,"node_id":"I_kwDODunzps4_k_pN","number":3339,"title":"to_tf_dataset fails on TPU","user":{"login":"nbroad1881","id":24982805,"node_id":"MDQ6VXNlcjI0OTgyODA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24982805?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nbroad1881","html_url":"https:\/\/github.com\/nbroad1881","followers_url":"https:\/\/api.github.com\/users\/nbroad1881\/followers","following_url":"https:\/\/api.github.com\/users\/nbroad1881\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nbroad1881\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nbroad1881\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nbroad1881\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nbroad1881\/orgs","repos_url":"https:\/\/api.github.com\/users\/nbroad1881\/repos","events_url":"https:\/\/api.github.com\/users\/nbroad1881\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nbroad1881\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-11-30T00:50:52Z","updated_at":"2021-12-02T14:21:27Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Using `to_tf_dataset` to create a dataset and then putting it in `model.fit` results in an internal error on TPUs. I've only tried on Colab and Kaggle TPUs, not GCP TPUs.\r\n\r\n\r\n## Steps to reproduce the bug\r\nI made a colab to show the error. 
https:\/\/colab.research.google.com\/drive\/12x_PFKzGouFxqD4OuWfnycW_1TaT276z?usp=sharing\r\n\r\n## Expected results\r\ndataset from `to_tf_dataset` works in `model.fit` \r\nRight below the first error in the colab I use `tf.data.Dataset.from_tensor_slices` and `model.fit` works just fine. This is the desired outcome.\r\n\r\n## Actual results\r\n```\r\nInternalError: 5 root error(s) found.\r\n (0) INTERNAL: {{function_node __inference_train_function_30558}} failed to connect to all addresses\r\nAdditional GRPC error information from remote target \/job:localhost\/replica:0\/task:0\/device:CPU:0:\r\n:{\"created\":\"@1638231897.932218653\",\"description\":\"Failed to pick subchannel\",\"file\":\"third_party\/grpc\/src\/core\/ext\/filters\/client_channel\/client_channel.cc\",\"file_line\":3151,\"referenced_errors\":[{\"created\":\"@1638231897.932216754\",\"description\":\"failed to connect to all addresses\",\"file\":\"third_party\/grpc\/src\/core\/lib\/transport\/error_utils.cc\",\"file_line\":161,\"grpc_status\":14}]}\r\n\t [[{{node StatefulPartitionedCall}}]]\r\n\t [[MultiDeviceIteratorGetNextFromShard]]\r\nExecuting non-communication op originally returned UnavailableError, and was replaced by InternalError to avoid invoking TF network error handling logic.\r\n\t [[RemoteCall]]\r\n\t [[IteratorGetNextAsOptional]]\r\n\t [[tpu_compile_succeeded_assert\/_14023832043698465348\/_7\/_439]]\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.16.1\r\n- Platform: Linux-5.4.104+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 3.0.0\r\n- Tensorflow 2.7.0\r\n- `transformers` 4.12.5\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3339\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3338","id":1066371235,"node_id":"PR_kwDODunzps4vJRFM","number":3338,"title":"[WIP] Add doctests for 
tutorials","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-29T18:40:46Z","updated_at":"2022-01-06T19:36:03Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3338","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3338","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3338.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3338.patch","merged_at":null},"body":"Opening a PR as discussed with @LysandreJik for some help with doctest issues. The goal is to add doctests for each of the tutorials in the documentation to make sure the code samples work as shown.\r\n\r\n### Issues\r\n\r\nA doctest has been added in the docstring of the `load_dataset_builder` function in `load.py` to handle variable outputs with the `ELLIPSIS` directive. When I run doctest on the `load_hub.rst` file, doctest should recognize the expected output from the docstring, and the corresponding code sample in `load_hub.rst` should pass. I am having the same issue with handling tracebacks in the `load_dataset` function.\r\n\r\nFrom the docstring:\r\n```\r\n>>> dataset_builder.cache_dir #doctest: +ELLIPSIS\r\n\/Users\/...\/.cache\/huggingface\/datasets\/imdb\/plain_text\/1.0.0\/...\r\n```\r\nTest result:\r\n```\r\nFailed example:\r\n dataset_builder.cache_dir\r\nExpected:\r\n \/Users\/...\/.cache\/huggingface\/datasets\/imdb\/plain_text\/1.0.0\/...\r\nGot:\r\n \/Users\/steven\/.cache\/huggingface\/datasets\/imdb\/plain_text\/1.0.0\/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1\r\n```\r\n\r\nI am able to get the doctest to pass by adding the doctest directives (`ELLIPSIS` and `NORMALIZE_WHITESPACE`) to the code samples in the `rst` file directly. But my understanding is that these directives should also work in the docstrings of the functions. 
I am running the test from the root of the directory:\r\n\r\n```\r\npython -m doctest -v docs\/source\/load_hub.rst\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3338\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3337","id":1066232936,"node_id":"I_kwDODunzps4_jWxo","number":3337,"title":"Typing of Dataset.__getitem__ could be improved.","user":{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false},"assignees":[{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-11-29T16:20:11Z","updated_at":"2021-12-14T10:28:54Z","closed_at":"2021-12-14T10:28:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nThe newly added typing for Dataset.__getitem__ is Union[Dict, List]. This makes tools like mypy a bit awkward to use as we need to check the type manually. We could use type overloading to make this easier. 
[Documentation](https:\/\/docs.python.org\/3\/library\/typing.html#typing.overload)\r\n\r\n## Steps to reproduce the bug\r\nLet's have a file `test.py`\r\n\r\n```python\r\nfrom typing import List, Dict, Any\r\n\r\nfrom datasets import Dataset\r\n\r\nds = Dataset.from_dict({\r\n 'a': [1,2,3],\r\n 'b': [\"1\", \"2\", \"3\"]\r\n})\r\n\r\none_colum: List[str] = ds['a']\r\nsome_index: Dict[Any, Any] = ds[1]\r\n```\r\n\r\n## Expected results\r\n\r\nRunning `mypy test.py` should not give any error.\r\n\r\n\r\n## Actual results\r\n\r\n```\r\ntest.py:10: error: Incompatible types in assignment (expression has type \"Union[Dict[Any, Any], List[Any]]\", variable has type \"List[str]\")\r\ntest.py:11: error: Incompatible types in assignment (expression has type \"Union[Dict[Any, Any], List[Any]]\", variable has type \"Dict[Any, Any]\")\r\nFound 2 errors in 1 file (checked 1 source file)\r\n```\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.13.3\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.8\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3337\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3336","id":1066208436,"node_id":"PR_kwDODunzps4vIwUE","number":3336,"title":"Add support for multiple dynamic dimensions and to_pandas conversion for dynamic 
arrays","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-29T15:58:59Z","updated_at":"2021-11-29T15:58:59Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3336","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3336","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3336.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3336.patch","merged_at":null},"body":"Add support for multiple dynamic dimensions (e.g. `(None, None, 3)` for arbitrary sized images) and `to_pandas()` conversion for dynamic arrays.\r\n\r\nTODOs:\r\n* [ ] Cleaner code\r\n* [ ] Formatting issues (if NumPy doesn't allow broadcasting even though dtype is np.object)\r\n* [ ] Fix some issues with zero-dim tensors \r\n* [ ] Tests\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3336\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3335","id":1066064126,"node_id":"PR_kwDODunzps4vISGy","number":3335,"title":"add Speech commands 
dataset","user":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2021-11-29T13:52:47Z","updated_at":"2021-12-10T10:37:21Z","closed_at":"2021-12-10T10:30:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3335","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3335","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3335.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3335.patch","merged_at":"2021-12-10T10:30:15Z"},"body":"closes #3283","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3335\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3334","id":1065983923,"node_id":"I_kwDODunzps4_iZ-z","number":3334,"title":"Integrate Polars 
library","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-11-29T12:31:54Z","updated_at":"2021-11-29T13:01:12Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Check potential integration of the Polars library: https:\/\/github.com\/pola-rs\/polars\r\n- Benchmark: https:\/\/h2oai.github.io\/db-benchmark\/\r\n\r\nCC: @thomwolf @lewtun \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3334\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3333","id":1065346919,"node_id":"I_kwDODunzps4_f-dn","number":3333,"title":" load JSON files, get the 
errors","user":{"login":"yanllearnn","id":38966558,"node_id":"MDQ6VXNlcjM4OTY2NTU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38966558?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yanllearnn","html_url":"https:\/\/github.com\/yanllearnn","followers_url":"https:\/\/api.github.com\/users\/yanllearnn\/followers","following_url":"https:\/\/api.github.com\/users\/yanllearnn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yanllearnn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yanllearnn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yanllearnn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yanllearnn\/orgs","repos_url":"https:\/\/api.github.com\/users\/yanllearnn\/repos","events_url":"https:\/\/api.github.com\/users\/yanllearnn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yanllearnn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2021-11-28T14:29:58Z","updated_at":"2021-12-01T09:34:31Z","closed_at":"2021-12-01T03:57:48Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, does this bug be fixed? when I load JSON files, I get the same errors by the command \r\n`!python3 run.py --do_train --task qa --dataset squad-retrain-data\/train-v2.0.json --output_dir .\/re_trained_model\/`\r\n\r\nchange the dateset to load json by refering to https:\/\/huggingface.co\/docs\/datasets\/loading.html\r\n`dataset = datasets.load_dataset('json', data_files=args.dataset)`\r\n\r\nErrors:\r\n`Downloading and preparing dataset json\/default (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/root\/.cache\/huggingface\/datasets\/json\/default-c1e124ad488911b8\/0.0.0\/45636811569ec4a6630521c18235dfbbab83b7ab572e3393c5ba68ccabe98264...\r\n`\r\n\r\n_Originally posted by @yanllearnn in https:\/\/github.com\/huggingface\/datasets\/issues\/730#issuecomment-981095050_","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3333\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3332","id":1065345853,"node_id":"PR_kwDODunzps4vGBig","number":3332,"title":"Fix error message and add extension 
fallback","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-28T14:25:29Z","updated_at":"2021-11-29T13:34:15Z","closed_at":"2021-11-29T13:34:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3332","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3332","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3332.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3332.patch","merged_at":"2021-11-29T13:34:14Z"},"body":"Fix the error message raised if `infered_module_name` is `None` in `CommunityDatasetModuleFactoryWithoutScript.get_module` and make `infer_module_for_data_files` more robust. \r\n\r\nIn the linked issue, `infer_module_for_data_files` returns `None` because `json` is the second most common extension due to the suffix ordering. 
Now, we go from the most common to the least common extension and try to map it or return `None`.\r\n\r\nFix #3331","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3332\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3331","id":1065275896,"node_id":"I_kwDODunzps4_ftH4","number":3331,"title":"AttributeError: 'CommunityDatasetModuleFactoryWithoutScript' object has no attribute 'path'","user":{"login":"luozhouyang","id":34032031,"node_id":"MDQ6VXNlcjM0MDMyMDMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34032031?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/luozhouyang","html_url":"https:\/\/github.com\/luozhouyang","followers_url":"https:\/\/api.github.com\/users\/luozhouyang\/followers","following_url":"https:\/\/api.github.com\/users\/luozhouyang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/luozhouyang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/luozhouyang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/luozhouyang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/luozhouyang\/orgs","repos_url":"https:\/\/api.github.com\/users\/luozhouyang\/repos","events_url":"https:\/\/api.github.com\/users\/luozhouyang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/luozhouyang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-28T08:54:05Z","updated_at":"2021-11-29T13:49:44Z","closed_at":"2021-11-29T13:34:14Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI add a new question answering dataset to huggingface datasets manually. 
Here is the link: [luozhouyang\/question-answering-datasets](https:\/\/huggingface.co\/datasets\/luozhouyang\/question-answering-datasets)\r\n\r\nBut when I load the dataset, an error raised: \r\n\r\n```bash\r\nAttributeError: 'CommunityDatasetModuleFactoryWithoutScript' object has no attribute 'path'\r\n```\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"luozhouyang\/question-answering-datasets\", data_files=[\"dureader_robust.train.json\"])\r\n```\r\n\r\n## Expected results\r\nLoad dataset successfully without any error.\r\n\r\n## Actual results\r\n```bash\r\nTraceback (most recent call last):\r\n File \"\/mnt\/home\/zhouyang.lzy\/github\/naivenlp\/naivenlp\/tests\/question_answering_tests\/dataset_test.py\", line 89, in test_load_dataset_with_hf\r\n data_files=[\"dureader_robust.train.json\"],\r\n File \"\/mnt\/home\/zhouyang.lzy\/.conda\/envs\/naivenlp\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 1616, in load_dataset\r\n **config_kwargs,\r\n File \"\/mnt\/home\/zhouyang.lzy\/.conda\/envs\/naivenlp\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 1443, in load_dataset_builder\r\n path, revision=revision, download_config=download_config, download_mode=download_mode, data_files=data_files\r\n File \"\/mnt\/home\/zhouyang.lzy\/.conda\/envs\/naivenlp\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 1157, in dataset_module_factory\r\n raise e1 from None\r\n File \"\/mnt\/home\/zhouyang.lzy\/.conda\/envs\/naivenlp\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 1144, in dataset_module_factory\r\n download_mode=download_mode,\r\n File \"\/mnt\/home\/zhouyang.lzy\/.conda\/envs\/naivenlp\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 798, in get_module\r\n raise FileNotFoundError(f\"No data files or dataset script found in {self.path}\")\r\nAttributeError: 'CommunityDatasetModuleFactoryWithoutScript' object has no attribute 'path'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: linux\r\n- Python version: 3.6.13\r\n- PyArrow version: 6.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3331\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3330","id":1065176619,"node_id":"PR_kwDODunzps4vFtF7","number":3330,"title":"Change TriviaQA license 
(#3313)","user":{"login":"avinashsai","id":22453634,"node_id":"MDQ6VXNlcjIyNDUzNjM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22453634?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/avinashsai","html_url":"https:\/\/github.com\/avinashsai","followers_url":"https:\/\/api.github.com\/users\/avinashsai\/followers","following_url":"https:\/\/api.github.com\/users\/avinashsai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/avinashsai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/avinashsai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/avinashsai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/avinashsai\/orgs","repos_url":"https:\/\/api.github.com\/users\/avinashsai\/repos","events_url":"https:\/\/api.github.com\/users\/avinashsai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/avinashsai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-28T03:26:45Z","updated_at":"2021-11-29T11:24:21Z","closed_at":"2021-11-29T11:24:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3330","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3330","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3330.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3330.patch","merged_at":"2021-11-29T11:24:21Z"},"body":"Fixes (#3313)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3330\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3329","id":1065096971,"node_id":"I_kwDODunzps4_fBcL","number":3329,"title":"Map function: Type error on iter 
#999","user":{"login":"josephkready666","id":52659318,"node_id":"MDQ6VXNlcjUyNjU5MzE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52659318?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/josephkready666","html_url":"https:\/\/github.com\/josephkready666","followers_url":"https:\/\/api.github.com\/users\/josephkready666\/followers","following_url":"https:\/\/api.github.com\/users\/josephkready666\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/josephkready666\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/josephkready666\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/josephkready666\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/josephkready666\/orgs","repos_url":"https:\/\/api.github.com\/users\/josephkready666\/repos","events_url":"https:\/\/api.github.com\/users\/josephkready666\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/josephkready666\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-11-27T17:53:05Z","updated_at":"2021-11-29T20:40:15Z","closed_at":"2021-11-29T20:40:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nUsing the map function, it throws a type error on iter #999\r\n\r\nHere is the code I am calling:\r\n```\r\ndataset = datasets.load_dataset('squad')\r\ndataset['validation'].map(text_numbers_to_int, input_columns=['context'], fn_kwargs={'column': 'context'})\r\n``` \r\ntext_numbers_to_int returns the input text with numbers replaced in the format {'context': text}\r\n\r\nIt happens at \r\n`\r\nFile \"C:\\Users\\lonek\\anaconda3\\envs\\ai\\Lib\\site-packages\\datasets\\arrow_writer.py\", line 289, in \r\n [row[0][col] for row in self.current_examples], type=col_type, try_type=col_try_type, col=col\r\n`\r\n\r\nThe issue is that the list comprehension expects self.current_examples to be type tuple(dict, str), but for some reason 26 out of 1000 of the sefl.current_examples are type tuple(str, str)\r\n\r\nHere is an example of what self.current_examples should be\r\n({'context': 'Super Bowl 50 was an...merals 50.'}, '')\r\n\r\nHere is an example of what self.current_examples are when it throws the error:\r\n('The Panthers used th... 
Marriott.', '')\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3329\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3328","id":1065015262,"node_id":"PR_kwDODunzps4vFTpW","number":3328,"title":"Quick fix error formatting","user":{"login":"NouamaneTazi","id":29777165,"node_id":"MDQ6VXNlcjI5Nzc3MTY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29777165?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NouamaneTazi","html_url":"https:\/\/github.com\/NouamaneTazi","followers_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/followers","following_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/orgs","repos_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/repos","events_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NouamaneTazi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-27T11:47:48Z","updated_at":"2021-11-29T13:32:42Z","closed_at":"2021-11-29T13:32:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3328","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3328","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3328.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3328.patch","merged_at":"2021-11-29T13:32:42Z"},"body":"While working on a dataset, I got the error\r\n```\r\nTypeError: Provided `function` which is applied to all elements of table returns a `dict` of types {[type(x) for x in processed_inputs.values()]}. 
When using `batched=True`, make sure provided `function` returns a `dict` of types like `{allowed_batch_return_types}`.\r\n```\r\n\r\nThis PR should fix the formatting of this error","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3328\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3327","id":1064675888,"node_id":"I_kwDODunzps4_daow","number":3327,"title":"\"Shape of query is incorrect, it has to be either a 1D array or 2D (1, N)\"","user":{"login":"eliasws","id":19492473,"node_id":"MDQ6VXNlcjE5NDkyNDcz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19492473?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eliasws","html_url":"https:\/\/github.com\/eliasws","followers_url":"https:\/\/api.github.com\/users\/eliasws\/followers","following_url":"https:\/\/api.github.com\/users\/eliasws\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eliasws\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eliasws\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eliasws\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eliasws\/orgs","repos_url":"https:\/\/api.github.com\/users\/eliasws\/repos","events_url":"https:\/\/api.github.com\/users\/eliasws\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eliasws\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-26T16:26:36Z","updated_at":"2021-11-26T16:44:11Z","closed_at":"2021-11-26T16:44:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nPassing a correctly shaped Numpy-Array to get_nearest_examples leads to the Exception\r\n\r\n\"Shape of query is incorrect, it has to be either a 1D array or 2D (1, N)\"\r\n\r\nProbably the reason for this is a wrongly converted assertion.\r\n\r\n1.15.1:\r\n\r\n`assert len(query.shape) == 1 or (len(query.shape) == 2 and query.shape[0] == 1)`\r\n\r\n1.16.1:\r\n\r\n```\r\n if len(query.shape) != 1 or (len(query.shape) == 2 and query.shape[0] != 1):\r\n raise ValueError(\"Shape of query is incorrect, it has to be either a 1D array or 2D (1, N)\")\r\n```\r\n\r\n## Steps to reproduce the bug\r\n\r\nfollow the steps described here: https:\/\/huggingface.co\/course\/chapter5\/6?fw=tf\r\n\r\n```python\r\n question_embedding.shape # (1, 768)\r\n\r\n scores, samples = embeddings_dataset.get_nearest_examples(\r\n \"embeddings\", question_embedding, k=5 # 
Error\r\n)\r\n\r\n# \"Shape of query is incorrect, it has to be either a 1D array or 2D (1, N)\"\r\n```\r\n\r\n## Expected results\r\nShould work without exception\r\n\r\n## Actual results\r\nThrows exception\r\n\r\n## Environment info\r\n- `datasets` version: 1.15.1\r\n- Platform: Darwin-20.6.0-x86_64-i386-64bit\r\n- Python version: 3.7.12\r\n- PyArrow version: 6.0.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3327\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3326","id":1064664479,"node_id":"PR_kwDODunzps4vEaYG","number":3326,"title":"Fix import `datasets` on python 3.10","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-26T16:10:00Z","updated_at":"2021-11-26T16:31:23Z","closed_at":"2021-11-26T16:31:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3326","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3326","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3326.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3326.patch","merged_at":"2021-11-26T16:31:23Z"},"body":"In python 3.10 it's no longer possible to use `functools.wraps` on a method decorated with `classmethod`.\r\nTo fix this I inverted the order of the `inject_arrow_table_documentation` and `classmethod` decorators\r\n\r\nFix #3324 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3326\/timeline","performed_via_github_app":null} 
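Issue #3327 above quotes both the original 1.15.1 assertion and the 1.16.1 check whose condition was converted incorrectly. A minimal sketch of a correctly negated version (it simply negates the whole original condition; `check_query_shape` is a hypothetical helper name used only for illustration):

```python
import numpy as np

def check_query_shape(query: np.ndarray) -> None:
    # Original 1.15.1 assertion:
    #   assert len(query.shape) == 1 or (len(query.shape) == 2 and query.shape[0] == 1)
    # Negating the whole condition keeps both accepted cases: 1D arrays and 2D (1, N) arrays.
    if not (len(query.shape) == 1 or (len(query.shape) == 2 and query.shape[0] == 1)):
        raise ValueError("Shape of query is incorrect, it has to be either a 1D array or 2D (1, N)")

check_query_shape(np.zeros(768))       # 1D query: accepted
check_query_shape(np.zeros((1, 768)))  # (1, N) query: accepted, unlike the 1.16.1 check quoted above
```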
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3325","id":1064663075,"node_id":"PR_kwDODunzps4vEaGO","number":3325,"title":"Update conda dependencies","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-26T16:08:07Z","updated_at":"2021-11-26T16:20:37Z","closed_at":"2021-11-26T16:20:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3325","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3325","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3325.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3325.patch","merged_at":"2021-11-26T16:20:36Z"},"body":"Some dependencies minimum versions were outdated. 
For example `pyarrow` and `huggingface_hub`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3325\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3324","id":1064661212,"node_id":"I_kwDODunzps4_dXDc","number":3324,"title":"Can't import `datasets` in python 3.10","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url
":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-11-26T16:06:14Z","updated_at":"2021-11-26T16:31:23Z","closed_at":"2021-11-26T16:31:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When importing `datasets` I'm getting this error in python 3.10:\r\n```python\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/__init__.py\", line 34, in \r\n from .arrow_dataset import Dataset, concatenate_datasets\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/arrow_dataset.py\", line 47, in \r\n from .arrow_reader import ArrowReader\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/arrow_reader.py\", line 33, in \r\n from .table import InMemoryTable, MemoryMappedTable, Table, concat_tables\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/table.py\", line 334, in \r\n class InMemoryTable(TableBlock):\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/table.py\", line 361, in InMemoryTable\r\n def from_pandas(cls, *args, **kwargs):\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/table.py\", line 24, in wrapper\r\n out = wraps(arrow_table_method)(method)\r\n File \"\/Users\/quentinlhoest\/.pyenv\/versions\/3.10.0\/lib\/python3.10\/functools.py\", line 61, in update_wrapper\r\n wrapper.__wrapped__ = wrapped\r\nAttributeError: readonly attribute\r\n```\r\n\r\nThis makes the conda build fail.\r\nI'm opening a PR to fix this and do a patch release 1.16.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3324\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3323","id":1064660452,"node_id":"PR_kwDODunzps4vEZwq","number":3323,"title":"Fix wrongly converted 
assert","user":{"login":"eliasws","id":19492473,"node_id":"MDQ6VXNlcjE5NDkyNDcz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19492473?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eliasws","html_url":"https:\/\/github.com\/eliasws","followers_url":"https:\/\/api.github.com\/users\/eliasws\/followers","following_url":"https:\/\/api.github.com\/users\/eliasws\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eliasws\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eliasws\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eliasws\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eliasws\/orgs","repos_url":"https:\/\/api.github.com\/users\/eliasws\/repos","events_url":"https:\/\/api.github.com\/users\/eliasws\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eliasws\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-26T16:05:39Z","updated_at":"2021-11-26T16:44:12Z","closed_at":"2021-11-26T16:44:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3323","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3323","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3323.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3323.patch","merged_at":"2021-11-26T16:44:11Z"},"body":"Seems like this assertion was replaced by an exception but the condition got wrongly converted.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3323\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3322","id":1064429705,"node_id":"PR_kwDODunzps4vD1Ct","number":3322,"title":"Add missing tags to 
XTREME","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-26T12:37:05Z","updated_at":"2021-11-29T13:40:07Z","closed_at":"2021-11-29T13:40:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3322","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3322","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3322.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3322.patch","merged_at":"2021-11-29T13:40:06Z"},"body":"Add missing tags to the XTREME benchmark for better discoverability.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3322\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3321","id":1063858386,"node_id":"PR_kwDODunzps4vCBeI","number":3321,"title":"Update URL of tatoeba subset of 
xtreme","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-11-25T18:42:31Z","updated_at":"2021-11-26T10:30:30Z","closed_at":"2021-11-26T10:30:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3321","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3321","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3321.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3321.patch","merged_at":"2021-11-26T10:30:29Z"},"body":"Updates the URL of the tatoeba subset of xtreme. Additionally, replaces `os.path.join` with `xjoin` to correctly join the URL segments on Windows.\r\n\r\nFix #3320 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3321\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3320","id":1063531992,"node_id":"I_kwDODunzps4_ZDXY","number":3320,"title":"Can't get tatoeba.rus 
dataset","user":{"login":"mmg10","id":65535131,"node_id":"MDQ6VXNlcjY1NTM1MTMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/65535131?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mmg10","html_url":"https:\/\/github.com\/mmg10","followers_url":"https:\/\/api.github.com\/users\/mmg10\/followers","following_url":"https:\/\/api.github.com\/users\/mmg10\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mmg10\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mmg10\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mmg10\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mmg10\/orgs","repos_url":"https:\/\/api.github.com\/users\/mmg10\/repos","events_url":"https:\/\/api.github.com\/users\/mmg10\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mmg10\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-25T12:31:11Z","updated_at":"2021-11-26T10:30:29Z","closed_at":"2021-11-26T10:30:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIt gives an error.\r\n\r\n> FileNotFoundError: Couldn't find file at https:\/\/github.com\/facebookresearch\/LASER\/raw\/master\/data\/tatoeba\/v1\/tatoeba.rus-eng.rus\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndata=load_dataset(\"xtreme\",\"tatoeba.rus\", split=\"validation\")\r\n```\r\n\r\n## Solution\r\nThe library tries to access the **master** branch. 
In the github repo of facebookresearch, it is in the **main** branch.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3320\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3319","id":1062749654,"node_id":"PR_kwDODunzps4u-xdv","number":3319,"title":"Add push_to_hub docs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-11-24T18:21:11Z","updated_at":"2021-11-25T14:47:46Z","closed_at":"2021-11-25T14:47:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3319","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3319","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3319.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3319.patch","merged_at":"2021-11-25T14:47:46Z"},"body":"Since #3098 it's now possible to upload a dataset on the Hub directly from python using the `push_to_hub` method.\r\nI just added a section in the \"Upload a dataset to the Hub\" tutorial.\r\n\r\nI kept the section quite simple but let me know if it sounds good to you @LysandreJik @stevhliu :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3319\/timeline","performed_via_github_app":null} 
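PR #3319 above documents that, since #3098, a dataset can be uploaded to the Hub directly from Python with `push_to_hub`. A minimal usage sketch, assuming you are already authenticated (e.g. via `huggingface-cli login`); the data file path and repository id are placeholders:

```python
from datasets import load_dataset

# Load a local dataset, e.g. from a JSON file (placeholder path).
ds = load_dataset("json", data_files="train.json", split="train")

# Push it to the Hugging Face Hub under your own namespace (placeholder repo id).
ds.push_to_hub("my-username/my-dataset")
```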
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3318","id":1062369717,"node_id":"PR_kwDODunzps4u9m-k","number":3318,"title":"Finish transition to PyArrow 3.0.0","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-24T12:30:14Z","updated_at":"2021-11-24T15:35:05Z","closed_at":"2021-11-24T15:35:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3318","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3318","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3318.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3318.patch","merged_at":"2021-11-24T15:35:04Z"},"body":"Finish transition to PyArrow 3.0.0 that was started in #3098.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3318\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3317","id":1062284447,"node_id":"I_kwDODunzps4_USyf","number":3317,"title":"Add desc parameter to Dataset filter 
method","user":{"login":"vblagoje","id":458335,"node_id":"MDQ6VXNlcjQ1ODMzNQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/458335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vblagoje","html_url":"https:\/\/github.com\/vblagoje","followers_url":"https:\/\/api.github.com\/users\/vblagoje\/followers","following_url":"https:\/\/api.github.com\/users\/vblagoje\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vblagoje\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vblagoje\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vblagoje\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vblagoje\/orgs","repos_url":"https:\/\/api.github.com\/users\/vblagoje\/repos","events_url":"https:\/\/api.github.com\/users\/vblagoje\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vblagoje\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-11-24T11:01:36Z","updated_at":"2022-01-05T18:31:24Z","closed_at":"2022-01-05T18:31:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nAs I was filtering very large datasets I noticed the filter method doesn't have the desc parameter which is available in the map method. Why don't we add a desc parameter to the filter method both for consistency and it's nice to give some feedback to users during long operations on Datasets?\r\n\r\n**Describe the solution you'd like**\r\nAdd desc parameter to Dataset filter method\r\n\r\n**Describe alternatives you've considered**\r\nN\/A\r\n\r\n**Additional context**\r\nN\/A\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3317\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3316","id":1062185822,"node_id":"I_kwDODunzps4_T6te","number":3316,"title":"Add RedCaps 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"
User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-11-24T09:23:02Z","updated_at":"2022-01-12T14:13:15Z","closed_at":"2022-01-12T14:13:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** RedCaps\r\n- **Description:** Web-curated image-text data created by the people, for the people\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2111.11431\r\n- **Data:** https:\/\/redcaps.xyz\/\r\n- **Motivation:** Multimodal image-text dataset: 12M+ Image-text pairs \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nProposed by @patil-suraj ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3316\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3315","id":1061678452,"node_id":"PR_kwDODunzps4u7WpU","number":3315,"title":"Removing query params for dynamic URL caching","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-11-23T20:24:12Z","updated_at":"2021-11-25T14:44:32Z","closed_at":"2021-11-25T14:44:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3315","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3315","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3315.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3315.patch","merged_at":"2021-11-25T14:44:31Z"},"body":"The main use case for this is to make dynamically generated private URLs (like the ones returned by CommonVoice API) compatible with the datasets' caching logic.\r\n\r\nUsage example:\r\n```python\r\nimport datasets\r\n\r\nclass 
CommonVoice(datasets.GeneratorBasedBuilder):\r\n def _info(self):\r\n return datasets.DatasetInfo()\r\n\r\n def _split_generators(self, dl_manager):\r\n dl_manager.download_config.ignore_url_params = True\r\n HUGE_URL = \"https:\/\/mozilla-common-voice-datasets.s3.dualstack.us-west-2.amazonaws.com\/cv-corpus-7.0-2021-07-21\/cv-corpus-7.0-2021-07-21-ab.tar.gz?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=ASIAQ3GQRTO3IU5JYB5K%2F20211125%2Fus-west-2%2Fs3%2Faws4_request&X-Amz-Date=20211125T131423Z&X-Amz-Expires=43200&X-Amz-Security-Token=FwoGZXIvYXdzEL7%2F%2F%2F%2F%2F%2F%2F%2F%2F%2FwEaDLsZw7Nj0d9h4rgheyKSBJJ6bxo1JdWLXAUhLMrUB8AXfhP8Ge4F8dtjwXmvGJgkIvdMT7P4YOEE1pS3mW8AyKsz7Z7IRVCIGQrOH1AbxGVVcDoCMMswXEOqL3nJFihKLf99%2F6l8iJVZdzftRUNgMhX5Hz0xSIL%2BzRDpH5nYa7C6YpEdOdW81CFVXybx7WUrX13wc8X4ZlUj7zrWcWf5p2VEIU5Utb7YHVi0Y5TQQiZSDoedQl0j4VmMuFkDzoobIO%2BvilgGeE2kIX0E62X423mEGNu4uQV5JsOuLAtv3GVlemsqEH3ZYrXDuxLmnvGj5HfMtySwI4vKv%2BlnnirD29o7hxvtidXiA8JMWhp93aP%2Fw7sod%2BPPbb5EqP%2B4Qb2GJ1myClOKcLEY0cqoy7XWm8NeVljLJojnFJVS5mNFBAzCCTJ%2FidxNsj8fflzkRoAzYaaPBuOTL1dgtZCdslK3FAuEvw0cik7P9A7IYiULV33otSHKMPcVfNHFsWQljs03gDztsIUWxaXvu6ck5vCcGULsHbfe6xoMPm2bR9jtKLONsslPcnzWIf7%2Fch2w%2F%2BjtTCd9IxaH4kytyJ6mIjpV%2FA%2F2h9qeDnDFsCphnMjAzPQn6tqCgTtPcyJ2b8c94ncgUnE4mepx%2FDa%2FanAEsrg9RPdmbdoPswzHn1IClh91IfSN74u95DZUxlPeZrHG5HxVCN3dKO6j%2Ft1xd20L0hEtazDdKOr8%2FYwGMirp8rp%2BII0pYOwQOrYHqH%2FREX2dRJctJtwE86Qj1eU8BAdXuFIkLC4NWXw%3D&X-Amz-Signature=1b8108d29b0e9c2bf6c7246e58ca8d5749a83de0704757ad8e8a44d78194691f&X-Amz-SignedHeaders=host\"\r\n dl_path = dl_manager.download_and_extract(HUGE_URL)\r\n print(dl_path)\r\n \r\n HUGE_URL += \"&some_new_or_changed_param=12345\"\r\n dl_path = dl_manager.download_and_extract(HUGE_URL)\r\n print(dl_path)\r\n\r\ndl_manager = datasets.DownloadManager(dataset_name=\"common_voice\")\r\nCommonVoice()._split_generators(dl_manager)\r\n```\r\n\r\nOutput:\r\n```\r\n\/home\/user\/.cache\/huggingface\/datasets\/downloads\/6ef2a377398ff3309554be040caa78414e6562d623dbd0ce8fc262459a7f8ec6\r\n\/home\/user\/.cache\/huggingface\/datasets\/downloads\/6ef2a377398ff3309554be040caa78414e6562d623dbd0ce8fc262459a7f8ec6\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3315\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3314","id":1061448227,"node_id":"PR_kwDODunzps4u6mdX","number":3314,"title":"Adding arg to pass process rank to 
`map`","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-23T15:55:21Z","updated_at":"2021-11-24T11:54:13Z","closed_at":"2021-11-24T11:54:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3314","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3314","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3314.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3314.patch","merged_at":"2021-11-24T11:54:13Z"},"body":"This PR adds a `with_rank` argument to `map` that gives the user the possibility to pass the rank of each process to their function. This is mostly designed for multi-GPU map (each process can be sent to a different device thanks to the rank). I've also added tests. 
I'm putting the PR up so you can check the code, I'll add a multi-GPU example to the doc (+ write a bit in the doc for the new arg)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3314\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3313","id":1060933392,"node_id":"I_kwDODunzps4_PI8Q","number":3313,"title":"TriviaQA License Mismatch","user":{"login":"akhilkedia","id":16665267,"node_id":"MDQ6VXNlcjE2NjY1MjY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16665267?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/akhilkedia","html_url":"https:\/\/github.com\/akhilkedia","followers_url":"https:\/\/api.github.com\/users\/akhilkedia\/followers","following_url":"https:\/\/api.github.com\/users\/akhilkedia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/akhilkedia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/akhilkedia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/akhilkedia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/akhilkedia\/orgs","repos_url":"https:\/\/api.github.com\/users\/akhilkedia\/repos","events_url":"https:\/\/api.github.com\/users\/akhilkedia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/akhilkedia\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-23T08:00:15Z","updated_at":"2021-11-29T11:24:21Z","closed_at":"2021-11-29T11:24:21Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nTriviaQA Webpage at http:\/\/nlp.cs.washington.edu\/triviaqa\/ says they do not own the copyright to the data. 
However, Huggingface datasets at https:\/\/huggingface.co\/datasets\/trivia_qa mentions that the dataset is released under Apache License\r\n\r\nIs the License Information on HuggingFace correct?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3313\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3312","id":1060440346,"node_id":"PR_kwDODunzps4u3duV","number":3312,"title":"add bl books genre dataset","user":{"login":"davanstrien","id":8995957,"node_id":"MDQ6VXNlcjg5OTU5NTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8995957?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/davanstrien","html_url":"https:\/\/github.com\/davanstrien","followers_url":"https:\/\/api.github.com\/users\/davanstrien\/followers","following_url":"https:\/\/api.github.com\/users\/davanstrien\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/davanstrien\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/davanstrien\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/davanstrien\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/davanstrien\/orgs","repos_url":"https:\/\/api.github.com\/users\/davanstrien\/repos","events_url":"https:\/\/api.github.com\/users\/davanstrien\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/davanstrien\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-11-22T17:54:50Z","updated_at":"2021-12-02T16:10:29Z","closed_at":"2021-12-02T16:07:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3312","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3312","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3312.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3312.patch","merged_at":"2021-12-02T16:07:47Z"},"body":"First of all thanks for the fantastic library\/collection of datasets \ud83e\udd17\r\n\r\nThis pull request adds a dataset of metadata from digitised (mostly 19th Century) books from the British Library The [data](https:\/\/bl.iro.bl.uk\/concern\/datasets\/1e1ccb46-65b4-4481-b6f8-b8129d5da053) contains various metadata about the books. In addition, a subset of the data includes 'genre' information which can be used for supervised text classification tasks. I hope that this offers easier access to a dataset for doing text classification on GLAM (galleries, libraries, archives and museums) data. 
\r\n\r\nI have tried to create three configurations that provide both an 'easy' version of the dataset if you want to use it for training a genre classification model and a more 'raw' version of the data for other potential use cases for the data. I am open to suggestions if this doesn't make sense. \r\n\r\nSimilarly, for some of the arrow datatypes, I have had to fall back to strings since there are missing values for some fields\/rows but I may have missed a more elegant way of dealing with it. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3312\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3311","id":1060387957,"node_id":"I_kwDODunzps4_NDx1","number":3311,"title":"Add WebSRC","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-22T16:58:33Z","updated_at":"2021-11-22T16:58:33Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** WebSRC\r\n- **Description:** WebSRC is a novel Web-based Structural Reading Comprehension dataset. It consists of 0.44M question-answer pairs, which are collected from 6.5K web pages with corresponding HTML source code, screenshots and metadata. 
\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2101.09465\r\n- **Data:** https:\/\/x-lance.github.io\/WebSRC\/dashboard.html#\r\n- **Motivation:** Currently adding MarkupLM to HuggingFace Transformers, which achieves SOTA on this dataset.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3311\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3310","id":1060098104,"node_id":"I_kwDODunzps4_L9A4","number":3310,"title":"Fatal error condition occurred in aws-c-io","user":{"login":"Crabzmatic","id":31850219,"node_id":"MDQ6VXNlcjMxODUwMjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31850219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Crabzmatic","html_url":"https:\/\/github.com\/Crabzmatic","followers_url":"https:\/\/api.github.com\/users\/Crabzmatic\/followers","following_url":"https:\/\/api.github.com\/users\/Crabzmatic\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Crabzmatic\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Crabzmatic\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Crabzmatic\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Crabzmatic\/orgs","repos_url":"https:\/\/api.github.com\/users\/Crabzmatic\/repos","events_url":"https:\/\/api.github.com\/users\/Crabzmatic\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Crabzmatic\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-11-22T12:27:54Z","updated_at":"2021-12-17T09:27:25Z","closed_at":"2021-11-29T22:22:37Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nFatal error when using the library\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('wikiann', 'en')\r\n```\r\n\r\n## Expected results\r\nNo fatal errors\r\n\r\n## Actual results\r\n```\r\nFatal error condition occurred in D:\\bld\\aws-c-io_1633633258269\\work\\source\\event_loop.c:74: aws_thread_launch(&cleanup_thread, s_event_loop_destroy_async_thread_fn, el_group, &thread_options) == AWS_OP_SUCCESS\r\nExiting Application\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.15.2.dev0\r\n- Platform: Windows-10-10.0.22504-SP0\r\n- Python version: 3.8.12\r\n- PyArrow version: 
6.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3310\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3309","id":1059496154,"node_id":"PR_kwDODunzps4u0Xgm","number":3309,"title":"fix: files counted twice in inferred structure","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-11-21T21:50:38Z","updated_at":"2021-11-23T17:00:58Z","closed_at":"2021-11-23T17:00:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3309","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3309","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3309.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3309.patch","merged_at":"2021-11-23T17:00:58Z"},"body":"Files were counted twice in a structure like:\r\n```\r\nmy_dataset_local_path\/\r\n\u251c\u2500\u2500 README.md\r\n\u2514\u2500\u2500 data\/\r\n \u251c\u2500\u2500 train\/\r\n \u2502 \u251c\u2500\u2500 shard_0.csv\r\n \u2502 \u251c\u2500\u2500 shard_1.csv\r\n \u2502 \u251c\u2500\u2500 shard_2.csv\r\n \u2502 \u2514\u2500\u2500 shard_3.csv\r\n \u2514\u2500\u2500 valid\/\r\n \u251c\u2500\u2500 shard_0.csv\r\n \u2514\u2500\u2500 shard_1.csv\r\n```\r\n\r\nThe reason is that they were matching both `*train*\/*` and `*train*\/**\/*`.\r\n\r\nThis PR fixes it. 
@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3309\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3308","id":1059255705,"node_id":"I_kwDODunzps4_IvWZ","number":3308,"title":"\"dataset_infos.json\" missing for chr_en and mc4","user":{"login":"amitness","id":8587189,"node_id":"MDQ6VXNlcjg1ODcxODk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8587189?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/amitness","html_url":"https:\/\/github.com\/amitness","followers_url":"https:\/\/api.github.com\/users\/amitness\/followers","following_url":"https:\/\/api.github.com\/users\/amitness\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/amitness\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/amitness\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/amitness\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/amitness\/orgs","repos_url":"https:\/\/api.github.com\/users\/amitness\/repos","events_url":"https:\/\/api.github.com\/users\/amitness\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/amitness\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-11-21T00:07:22Z","updated_at":"2022-01-19T13:55:32Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIn the repository, every dataset has its metadata in a file called`dataset_infos.json`. 
But, this file is missing for two datasets: `chr_en` and `mc4`.\r\n\r\n## Steps to reproduce the bug\r\nCheck [chr_en](https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/chr_en) and [mc4](https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/mc4)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3308\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3307","id":1059226297,"node_id":"PR_kwDODunzps4uzlWa","number":3307,"title":"Add IndoNLI dataset","user":{"login":"afaji","id":6201626,"node_id":"MDQ6VXNlcjYyMDE2MjY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6201626?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/afaji","html_url":"https:\/\/github.com\/afaji","followers_url":"https:\/\/api.github.com\/users\/afaji\/followers","following_url":"https:\/\/api.github.com\/users\/afaji\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/afaji\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/afaji\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/afaji\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/afaji\/orgs","repos_url":"https:\/\/api.github.com\/users\/afaji\/repos","events_url":"https:\/\/api.github.com\/users\/afaji\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/afaji\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-20T20:46:03Z","updated_at":"2021-11-25T14:51:48Z","closed_at":"2021-11-25T14:51:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3307","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3307","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3307.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3307.patch","merged_at":"2021-11-25T14:51:48Z"},"body":"This PR adds IndoNLI dataset, from https:\/\/aclanthology.org\/2021.emnlp-main.821\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3307\/timeline","performed_via_github_app":null} 
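A quick way to confirm the report in issue #3308 above is to scan a local clone of the repository for dataset folders that lack a `dataset_infos.json`. The sketch below is illustrative only: the `datasets/` path is an assumption about where the clone lives, and the expected output is taken from the issue text, not verified here.

```python
from pathlib import Path

# Assumption: running from the root of a local huggingface/datasets clone,
# where each dataset script lives in its own folder under the top-level `datasets/` directory.
repo_datasets = Path("datasets")

missing_infos = sorted(
    d.name
    for d in repo_datasets.iterdir()
    if d.is_dir() and not (d / "dataset_infos.json").exists()
)
print(missing_infos)  # per issue #3308, this list should include "chr_en" and "mc4"
```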
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3306","id":1059185860,"node_id":"I_kwDODunzps4_IeTE","number":3306,"title":"nested sequence feature won't encode example if the first item of the outside sequence is an empty list","user":{"login":"function2-llx","id":38486514,"node_id":"MDQ6VXNlcjM4NDg2NTE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38486514?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/function2-llx","html_url":"https:\/\/github.com\/function2-llx","followers_url":"https:\/\/api.github.com\/users\/function2-llx\/followers","following_url":"https:\/\/api.github.com\/users\/function2-llx\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/function2-llx\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/function2-llx\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/function2-llx\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/function2-llx\/orgs","repos_url":"https:\/\/api.github.com\/users\/function2-llx\/repos","events_url":"https:\/\/api.github.com\/users\/function2-llx\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/function2-llx\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-11-20T16:57:54Z","updated_at":"2021-12-08T13:02:15Z","closed_at":"2021-12-08T13:02:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAs the title, nested sequence feature won't encode example if the first item of the outside sequence is an empty list.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import Features, Sequence, ClassLabel\r\nfeatures = Features({\r\n 'x': Sequence(Sequence(ClassLabel(names=['a', 'b']))),\r\n})\r\nprint(features.encode_batch({\r\n 'x': [\r\n [['a'], ['b']],\r\n [[], ['b']],\r\n ]\r\n}))\r\n```\r\n\r\n## Expected results\r\nprint `{'x': [[[0], [1]], [[], ['1']]]}`\r\n\r\n## Actual results\r\nprint `{'x': [[[0], [1]], [[], ['b']]]}`\r\n\r\n## Environment info\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-5.13.0-21-generic-x86_64-with-glibc2.34\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.0\r\n\r\n## Additional information\r\nI think the issue stems from 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/8555197a3fe826e98bd0206c2d031c4488c53c5c\/src\/datasets\/features\/features.py#L847-L848).\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3306\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3305","id":1059161000,"node_id":"PR_kwDODunzps4uzZWv","number":3305,"title":"asserts replaced with exception for ``fingerprint.py``, ``search.py``, ``arrow_writer.py`` and ``metric.py``","user":{"login":"Ishan-Kumar2","id":46553104,"node_id":"MDQ6VXNlcjQ2NTUzMTA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46553104?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Ishan-Kumar2","html_url":"https:\/\/github.com\/Ishan-Kumar2","followers_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/followers","following_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/orgs","repos_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/repos","events_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-20T14:51:23Z","updated_at":"2021-11-22T18:24:32Z","closed_at":"2021-11-22T17:08:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3305","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3305","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3305.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3305.patch","merged_at":"2021-11-22T17:08:13Z"},"body":"Addresses #3171 \r\nFixes exception for ``fingerprint.py``, ``search.py``, ``arrow_writer.py`` and ``metric.py`` and modified tests","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3305\/timeline","performed_via_github_app":null} 
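The pattern behind PR #3305 above is small but worth spelling out: a bare `assert` gives callers no message and is stripped when Python runs with `-O`, while an explicit exception keeps the check in place and explains what went wrong. The function name and allowed values below are invented for illustration; the real edits live in `fingerprint.py`, `search.py`, `arrow_writer.py` and `metric.py`.

```python
# Before: fails with a bare AssertionError and disappears entirely under `python -O`.
def check_format(format_type):
    assert format_type in ("numpy", "torch", "tensorflow")


# After: the style adopted in the PR -- raise a specific exception with an actionable message.
def check_format_strict(format_type):
    supported = ("numpy", "torch", "tensorflow")
    if format_type not in supported:
        raise ValueError(
            f"Format {format_type!r} is not supported. Expected one of {supported}."
        )
```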
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3304","id":1059130494,"node_id":"I_kwDODunzps4_IQx-","number":3304,"title":"Dataset object has no attribute `to_tf_dataset`","user":{"login":"RajkumarGalaxy","id":59993678,"node_id":"MDQ6VXNlcjU5OTkzNjc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59993678?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RajkumarGalaxy","html_url":"https:\/\/github.com\/RajkumarGalaxy","followers_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/followers","following_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/orgs","repos_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/repos","events_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-20T12:03:59Z","updated_at":"2021-11-21T07:07:25Z","closed_at":"2021-11-21T07:07:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am following HuggingFace Course. I am at Fine-tuning a model. \r\nLink: https:\/\/huggingface.co\/course\/chapter3\/2?fw=tf\r\n\r\nI use tokenize_function and `map` as mentioned in the course to process data.\r\n\r\n`# define a tokenize function`\r\n`def Tokenize_function(example):`\r\n` return tokenizer(example['sentence'], truncation=True)`\r\n\r\n`# tokenize entire data`\r\n`tokenized_data = raw_data.map(Tokenize_function, batched=True)`\r\n\r\nI get Dataset object at this point. 
When I try converting this to a TF dataset object as mentioned in the course, it throws the following error.\r\n\r\n`# convert to TF dataset`\r\n`train_data = tokenized_data[\"train\"].to_tf_dataset( `\r\n` columns = ['attention_mask', 'input_ids', 'token_type_ids'], `\r\n` label_cols = ['label'], `\r\n` shuffle = True, `\r\n` collate_fn = data_collator, `\r\n` batch_size = 8 `\r\n`)`\r\n\r\nOutput:\r\n\r\n`---------------------------------------------------------------------------`\r\n`AttributeError Traceback (most recent call last)`\r\n`\/tmp\/ipykernel_42\/103099799.py in `\r\n` 1 # convert to TF dataset`\r\n`----> 2 train_data = tokenized_data[\"train\"].to_tf_dataset( \\`\r\n` 3 columns = ['attention_mask', 'input_ids', 'token_type_ids'], \\`\r\n` 4 label_cols = ['label'], \\`\r\n` 5 shuffle = True, \\`\r\n`AttributeError: 'Dataset' object has no attribute 'to_tf_dataset'`\r\n\r\nWhen I look for `dir(tokenized_data[\"train\"])`, there is no method or attribute in the name of `to_tf_dataset`.\r\n\r\nWhy do I get this error? And how to clear this?\r\n\r\nPlease help me.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3304\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3303","id":1059129732,"node_id":"I_kwDODunzps4_IQmE","number":3303,"title":"DataCollatorWithPadding: TypeError","user":{"login":"RajkumarGalaxy","id":59993678,"node_id":"MDQ6VXNlcjU5OTkzNjc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59993678?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RajkumarGalaxy","html_url":"https:\/\/github.com\/RajkumarGalaxy","followers_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/followers","following_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/orgs","repos_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/repos","events_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RajkumarGalaxy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-20T11:59:55Z","updated_at":"2021-11-21T07:05:37Z","closed_at":"2021-11-21T07:05:37Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI am following the HuggingFace course. I am now at Fine-tuning [https:\/\/huggingface.co\/course\/chapter3\/3?fw=tf](https:\/\/huggingface.co\/course\/chapter3\/3?fw=tf). When I set up `DataCollatorWithPadding` as following I got an error while trying to reproduce the course code in Kaggle. This error occurs with either a CPU-only-device or a GPU-device.\r\n\r\nInput:\r\n```checkpoint = 'bert-base-uncased'\r\ntokenizer = AutoTokenizer.from_pretrained(checkpoint)\r\ndata_collator = DataCollatorWithPadding(tokenizer=tokenizer, return_tensors=\"tf\")\r\n```\r\n\r\nOutput:\r\n```---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n\/tmp\/ipykernel_42\/1563280798.py in \r\n 1 checkpoint = 'bert-base-uncased'\r\n 2 tokenizer = AutoTokenizer.from_pretrained(checkpoint)\r\n----> 3 data_collator = DataCollatorWithPadding(tokenizer=tokenizer, return_tensors=\"pt\")\r\nTypeError: __init__() got an unexpected keyword argument 'return_tensors'\r\n```\r\n\r\nWhen I call `help` method, it too confirms that there is no argument `return_tensors`.\r\nInput:\r\n```\r\nhelp(DataCollatorWithPadding.__init__)\r\n```\r\nOutput:\r\n```\r\nHelp on function __init__ in module transformers.data.data_collator:\r\n\r\n__init__(self, tokenizer: transformers.tokenization_utils_base.PreTrainedTokenizerBase, padding: Union[bool, str, transformers.file_utils.PaddingStrategy] = True, max_length: Union[int, NoneType] = None, pad_to_multiple_of: Union[int, NoneType] = None) -> None\r\n```\r\n\r\nBut, the source file *[Data Collator - docs](https:\/\/huggingface.co\/transformers\/main_classes\/data_collator.html#datacollatorwithpadding)* says that there is such an argument. 
By default, it returns Pytorch tensors while I need TF tensors.\r\n\r\nWhere do I miss?\r\nPlease help me.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3303\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3302","id":1058907168,"node_id":"PR_kwDODunzps4uynjc","number":3302,"title":"fix old_val typo in f-string","user":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-19T20:51:08Z","updated_at":"2021-11-25T22:14:43Z","closed_at":"2021-11-22T17:04:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3302","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3302","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3302.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3302.patch","merged_at":"2021-11-22T17:04:19Z"},"body":"\r\nThis PR is to correct a typo in #3277 that @Carlosbogo revieled in a comment.\r\n\r\nRelated closed issue : #3257 \r\n\r\nSorry about that \ud83d\ude05.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3302\/timeline","performed_via_github_app":null} 
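The `TypeError` in issue #3303 above most likely means the installed `transformers` release predates the `return_tensors` argument on the data collators, even though the current documentation lists it. A minimal compatibility sketch, assuming only that newer releases accept the argument and older ones raise `TypeError`:

```python
import transformers
from transformers import AutoTokenizer, DataCollatorWithPadding

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")

try:
    # Newer transformers releases accept `return_tensors` and can emit TF tensors directly.
    data_collator = DataCollatorWithPadding(tokenizer=tokenizer, return_tensors="tf")
except TypeError:
    # Older releases do not have the argument; upgrading transformers
    # (`pip install -U transformers`) is the simplest way to follow the course code as written.
    data_collator = DataCollatorWithPadding(tokenizer=tokenizer)
    print(f"transformers {transformers.__version__} predates `return_tensors`; consider upgrading.")
```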
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3301","id":1058718957,"node_id":"PR_kwDODunzps4uyA9o","number":3301,"title":"Add wikipedia tags","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-19T16:39:25Z","updated_at":"2021-11-19T16:49:30Z","closed_at":"2021-11-19T16:49:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3301","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3301","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3301.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3301.patch","merged_at":"2021-11-19T16:49:29Z"},"body":"Add the missing tags to the wikipedia dataset card.\r\n\r\nI also added the missing languages code in our language codes list.\r\n\r\nThis should also fix the code snippet that is presented on the Hub to load the dataset: fix https:\/\/github.com\/huggingface\/datasets\/issues\/3292","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3301\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3300","id":1058644459,"node_id":"I_kwDODunzps4_GaHr","number":3300,"title":"\u2753 Dataset loading script from Hugging Face 
Hub","user":{"login":"pietrolesci","id":61748653,"node_id":"MDQ6VXNlcjYxNzQ4NjUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/61748653?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pietrolesci","html_url":"https:\/\/github.com\/pietrolesci","followers_url":"https:\/\/api.github.com\/users\/pietrolesci\/followers","following_url":"https:\/\/api.github.com\/users\/pietrolesci\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pietrolesci\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pietrolesci\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pietrolesci\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pietrolesci\/orgs","repos_url":"https:\/\/api.github.com\/users\/pietrolesci\/repos","events_url":"https:\/\/api.github.com\/users\/pietrolesci\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pietrolesci\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"assignees":[{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":8,"created_at":"2021-11-19T15:20:52Z","updated_
at":"2021-12-22T10:57:56Z","closed_at":"2021-12-22T10:57:56Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi there,\r\n\r\nI am trying to add my custom `ag_news` with its own loading script on the Hugging Face datasets hub. In particular, I would like to test the addition of a second configuration to the existing `ag_news` dataset. Once it works in my hub, I plan to make a PR to the original dataset. However, in trying to do so I have encountered certain problems as detailed below.\r\n\r\nIssues I have encountered:\r\n- Without a loading script, the train and test files are loaded together into a unique `dataset.Dataset` -> so I wrote a loading script. Also, I need a loading script otherwise I cannot specify multiple configurations\r\n- Once my loading script is working locally, I do not manage to make it work on the hub. In particular, I would like to be able to load the dataset like this\r\n```python\r\nload_dataset(\"pietrolesci\/ag_news\", name=\"my_configuration\")\r\n```\r\n\r\nApparently, the `load_dataset` is able to pick up the loading script from the hub and run it. However, it errors because it is unable to find the files. The structure of my hub repo is the following\r\n```\r\nag_news.py\r\ntrain.csv\r\ntest.csv\r\n```\r\nand the loading script I specify `data_dir=Path(__file__).parent` and `data_files=DataFilesDict({\"train\": \"train.csv\", \"test\": \"test.csv\"})`. In the documentation I could not find info regarding loading a dataset from the hub using a loading script present on the hub.\r\n\r\nAny suggestion is very much appreciated.\r\n\r\nBest,\r\nPietro\r\n\r\nLink to the hub repo: https:\/\/huggingface.co\/datasets\/pietrolesci\/ag_news\r\n\r\nBONUS: how can I make the data viewer work in this specific case? 
:)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3300\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3299","id":1058518213,"node_id":"I_kwDODunzps4_F7TF","number":3299,"title":"Add option to find unique elements in nested sequences when calling `Dataset.unique`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-19T13:16:06Z","updated_at":"2021-11-19T13:16:06Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It would be nice to have an option to flatten nested sequences to find unique elements stored in them when calling `Dataset.unique`. 
Currently, `Dataset.unique` only supports finding unique sequences and not unique elements in that situation.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3299\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3298","id":1058420201,"node_id":"I_kwDODunzps4_FjXp","number":3298,"title":"Agnews dataset viewer is not working","user":{"login":"pietrolesci","id":61748653,"node_id":"MDQ6VXNlcjYxNzQ4NjUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/61748653?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pietrolesci","html_url":"https:\/\/github.com\/pietrolesci","followers_url":"https:\/\/api.github.com\/users\/pietrolesci\/followers","following_url":"https:\/\/api.github.com\/users\/pietrolesci\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pietrolesci\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pietrolesci\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pietrolesci\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pietrolesci\/orgs","repos_url":"https:\/\/api.github.com\/users\/pietrolesci\/repos","events_url":"https:\/\/api.github.com\/users\/pietrolesci\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pietrolesci\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-11-19T11:18:59Z","updated_at":"2021-12-21T16:24:05Z","closed_at":"2021-12-21T16:24:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*name of the dataset*'\r\n\r\n**Link:** https:\/\/huggingface.co\/datasets\/ag_news\r\n\r\nHi there, the `ag_news` dataset viewer is not working.\r\n\r\nAm I the one who added this dataset? 
No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3298\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3297","id":1058263859,"node_id":"I_kwDODunzps4_E9Mz","number":3297,"title":".map() cache is wrongfully reused - only happens when the mapping function is imported","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-11-19T08:18:36Z","updated_at":"2021-12-06T23:45:40Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen `.map` is used with a mapping function that is imported, the cache is reused even if the mapping function has been modified.\r\nThe reason for this is that `dill` that is used for creating the fingerprint [pickles imported functions by reference](https:\/\/stackoverflow.com\/a\/67851411).\r\n\r\nI guess it is not a widespread case, but it can still lead to unwanted results unnoticeably. 
\r\n\r\n## Steps to reproduce the bug\r\nCreate files `a.py` and `b.py`:\r\n```python\r\n# a.py\r\nfrom datasets import load_dataset\r\n\r\ndef main():\r\n squad = load_dataset(\"squad\")\r\n squad.map(mapping_func, batched=True)\r\n\r\ndef mapping_func(examples):\r\n ID_LENGTH = 4\r\n examples[\"id\"] = [id_[:ID_LENGTH] for id_ in examples[\"id\"]]\r\n return examples\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n```\r\n```python\r\n# b.py\r\nfrom datasets import load_dataset\r\nfrom a import mapping_func\r\n\r\ndef main():\r\n squad = load_dataset(\"squad\")\r\n squad.map(mapping_func, batched=True)\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n```\r\nRun `python b.py` twice: In the first run you will see tqdm bars showing that the data is processed, and in the second run you will see \"Loading cached processed dataset at...\".\r\nNow change `ID_LENGTH` to another number in order to change the mapping function, and run `python b.py` again. You'll see that `.map` loads from the cache the result of the previous mapping function.\r\n\r\n## Expected results\r\nRun `python a.py` twice: In the first run you will see tqdm bars showing that the data is processed, and in the second run you will see \"Loading cached processed dataset at...\".\r\nNow change `ID_LENGTH` to another number in order to change the mapping function, and run `python a.py` again. You'll see that the dataset is being processed and that there's no reuse of the previous mapping function result.\r\n\r\n## Workaround\r\nPut the mapping function inside a dummy class as a static method:\r\n```python\r\n# a.py\r\nclass MappingFuncClass:\r\n @staticmethod\r\n def mapping_func(examples):\r\n ID_LENGTH = 4\r\n examples[\"id\"] = [id_[:ID_LENGTH] for id_ in examples[\"id\"]]\r\n return examples\r\n```\r\n```python\r\n# b.py\r\nfrom datasets import load_dataset\r\nfrom a import MappingFuncClass\r\n\r\ndef main():\r\n squad = load_dataset(\"squad\")\r\n squad.map(MappingFuncClass.mapping_func, batched=True)\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-4.4.0-19041-Microsoft-x86_64-with-glibc2.17\r\n- Python version: 3.8.10\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3297\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3296","id":1057970638,"node_id":"PR_kwDODunzps4uvlQz","number":3296,"title":"Fix temporary dataset_path creation for URIs related to remote 
fs","user":{"login":"francisco-perez-sorrosal","id":918006,"node_id":"MDQ6VXNlcjkxODAwNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/918006?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal","html_url":"https:\/\/github.com\/francisco-perez-sorrosal","followers_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/followers","following_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/orgs","repos_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/repos","events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-11-18T23:32:45Z","updated_at":"2021-12-06T10:45:04Z","closed_at":"2021-12-06T10:45:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3296","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3296","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3296.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3296.patch","merged_at":"2021-12-06T10:45:03Z"},"body":"This aims to close #3295","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3296\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3295","id":1057954892,"node_id":"I_kwDODunzps4_DxxM","number":3295,"title":"Temporary dataset_path for remote fs URIs not built properly in 
arrow_dataset.py::load_from_disk","user":{"login":"francisco-perez-sorrosal","id":918006,"node_id":"MDQ6VXNlcjkxODAwNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/918006?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal","html_url":"https:\/\/github.com\/francisco-perez-sorrosal","followers_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/followers","following_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/orgs","repos_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/repos","events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-18T23:24:02Z","updated_at":"2021-12-06T10:45:04Z","closed_at":"2021-12-06T10:45:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen trying to build a temporary dataset path from a remote URI in this block of code:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/42f6b1d18a4a1b6009b6e62d115491be16dfca22\/src\/datasets\/arrow_dataset.py#L1038-L1042\r\n\r\nthe result is not the expected when passing an absolute path in an URI like `hdfs:\/\/\/absolute\/path`.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndataset_path = \"hdfs:\/\/\/absolute\/path\"\r\nsrc_dataset_path = extract_path_from_uri(dataset_path)\r\ntmp_dir = get_temporary_cache_files_directory()\r\ndataset_path = Path(tmp_dir, src_dataset_path)\r\nprint(dataset_path)\r\n```\r\n\r\n## Expected results\r\nWith the code above, we would expect a value in `dataset_path` similar to:\r\n`\/tmp\/tmpnwxyvao5\/absolute\/path`\r\n\r\n## Actual results\r\nHowever, we get a `dataset_path` value like:\r\n`\/absolute\/path`\r\n\r\nThis is because this line here: https:\/\/github.com\/huggingface\/datasets\/blob\/42f6b1d18a4a1b6009b6e62d115491be16dfca22\/src\/datasets\/arrow_dataset.py#L1041\r\nreturns the last absolute path when two absolute paths (the one in `tmp_dir` and the one extracted from the URI in `src_dataset_path`) are passed as arguments.\r\n\r\n## Environment info\r\n- `datasets` version: 1.13.3\r\n- Platform: Linux-3.10.0-1160.15.2.el7.x86_64-x86_64-with-glibc2.33\r\n- Python version: 3.9.7\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3295\/timeline","performed_via_github_app":null} 
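The `Path` behaviour reported in #3295 above is standard `pathlib` semantics: when several segments are passed and a later segment is absolute, that segment discards everything before it. Below is a minimal sketch of the symptom and of one possible workaround (making the extracted path relative before joining); the example values are illustrative and this is not necessarily the fix merged in #3296.

```python
from pathlib import Path

# Hypothetical example values mirroring the report in #3295.
tmp_dir = "/tmp/tmpnwxyvao5"          # temporary cache directory
src_dataset_path = "/absolute/path"   # path extracted from "hdfs:///absolute/path"

# An absolute second segment resets the joined path.
print(Path(tmp_dir, src_dataset_path))              # -> /absolute/path

# Possible workaround: strip the leading slash so the segment is relative.
print(Path(tmp_dir, src_dataset_path.lstrip("/")))  # -> /tmp/tmpnwxyvao5/absolute/path
```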
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3294","id":1057495473,"node_id":"I_kwDODunzps4_CBmx","number":3294,"title":"Add Natural Adversarial Objects dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-18T15:34:44Z","updated_at":"2021-12-08T12:00:02Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Natural Adversarial Objects (NAO)\r\n- **Description:** Natural Adversarial Objects (NAO) is a new dataset to evaluate the robustness of object detection models. 
NAO contains 7,934 images and 9,943 objects that are unmodified and representative of real-world scenarios, but cause state-of-the-art detection models to misclassify with high confidence.\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2111.04204v1\r\n- **Data:** https:\/\/drive.google.com\/drive\/folders\/15P8sOWoJku6SSEiHLEts86ORfytGezi8\r\n- **Motivation:** interesting object detection dataset useful for studying misclassifications\r\n\r\ncc @NielsRogge \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3294\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3293","id":1057004431,"node_id":"PR_kwDODunzps4uslLN","number":3293,"title":"Pin version exclusion for Markdown","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-18T06:56:01Z","updated_at":"2021-11-18T10:28:05Z","closed_at":"2021-11-18T10:28:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3293","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3293","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3293.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3293.patch","merged_at":"2021-11-18T10:28:04Z"},"body":"As Markdown version 3.3.5 has a bug, it is better to exclude it in case users already have it installed in their environment.\r\n\r\nRelated to #3289, 
#3286.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3293\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3292","id":1056962554,"node_id":"I_kwDODunzps4-__f6","number":3292,"title":"Not able to load 'wikipedia' dataset","user":{"login":"abhibisht89","id":13541524,"node_id":"MDQ6VXNlcjEzNTQxNTI0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13541524?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhibisht89","html_url":"https:\/\/github.com\/abhibisht89","followers_url":"https:\/\/api.github.com\/users\/abhibisht89\/followers","following_url":"https:\/\/api.github.com\/users\/abhibisht89\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhibisht89\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhibisht89\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhibisht89\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhibisht89\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhibisht89\/repos","events_url":"https:\/\/api.github.com\/users\/abhibisht89\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhibisht89\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-11-18T05:41:18Z","updated_at":"2021-11-19T16:49:29Z","closed_at":"2021-11-19T16:49:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am following the instruction for loading the wikipedia dataset using datasets. 
However getting the below error.\r\n\r\n## Steps to reproduce the bug\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"wikipedia\")\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\n~\/anaconda3\/envs\/pytorch_p36\/lib\/python3.6\/site-packages\/datasets\/builder.py in _create_builder_config(self, name, custom_features, **config_kwargs)\r\n 339 \"Config name is missing.\"\r\n 340 \"\\nPlease pick one among the available configs: %s\" % list(self.builder_configs.keys())\r\n--> 341 + \"\\nExample of usage:\\n\\t`{}`\".format(example_of_usage)\r\n 342 )\r\n 343 builder_config = self.BUILDER_CONFIGS[0]\r\n\r\nValueError: Config name is missing.\r\nPlease pick one among the available configs: ['20200501.aa', '20200501.ab', '20200501.ace', '20200501.ady', '20200501.af', '20200501.ak', '20200501.als', '20200501.am', '20200501.an', '20200501.ang', '20200501.ar', '20200501.arc', '20200501.arz', '20200501.as', '20200501.ast', '20200501.atj', '20200501.av', '20200501.ay', '20200501.az', '20200501.azb', '20200501.ba', '20200501.bar', '20200501.bat-smg', '20200501.bcl', '20200501.be', '20200501.be-x-old', '20200501.bg', '20200501.bh', '20200501.bi', '20200501.bjn', '20200501.bm', '20200501.bn', '20200501.bo', '20200501.bpy', '20200501.br', '20200501.bs', '20200501.bug', '20200501.bxr', '20200501.ca', '20200501.cbk-zam', '20200501.cdo', '20200501.ce', '20200501.ceb', '20200501.ch', '20200501.cho', '20200501.chr', '20200501.chy', '20200501.ckb', '20200501.co', '20200501.cr', '20200501.crh', '20200501.cs', '20200501.csb', '20200501.cu', '20200501.cv', '20200501.cy', '20200501.da', '20200501.de', '20200501.din', '20200501.diq', '20200501.dsb', '20200501.dty', '20200501.dv', '20200501.dz', '20200501.ee', '20200501.el', '20200501.eml', '20200501.en', '20200501.eo', '20200501.es', '20200501.et', '20200501.eu', '20200501.ext', '20200501.fa', '20200501.ff', '20200501.fi', '20200501.fiu-vro', '20200501.fj', '20200501.fo', '20200501.fr', '20200501.frp', '20200501.frr', '20200501.fur', '20200501.fy', '20200501.ga', '20200501.gag', '20200501.gan', '20200501.gd', '20200501.gl', '20200501.glk', '20200501.gn', '20200501.gom', '20200501.gor', '20200501.got', '20200501.gu', '20200501.gv', '20200501.ha', '20200501.hak', '20200501.haw', '20200501.he', '20200501.hi', '20200501.hif', '20200501.ho', '20200501.hr', '20200501.hsb', '20200501.ht', '20200501.hu', '20200501.hy', '20200501.ia', '20200501.id', '20200501.ie', '20200501.ig', '20200501.ii', '20200501.ik', '20200501.ilo', '20200501.inh', '20200501.io', '20200501.is', '20200501.it', '20200501.iu', '20200501.ja', '20200501.jam', '20200501.jbo', '20200501.jv', '20200501.ka', '20200501.kaa', '20200501.kab', '20200501.kbd', '20200501.kbp', '20200501.kg', '20200501.ki', '20200501.kj', '20200501.kk', '20200501.kl', '20200501.km', '20200501.kn', '20200501.ko', '20200501.koi', '20200501.krc', '20200501.ks', '20200501.ksh', '20200501.ku', '20200501.kv', '20200501.kw', '20200501.ky', '20200501.la', '20200501.lad', '20200501.lb', '20200501.lbe', '20200501.lez', '20200501.lfn', '20200501.lg', '20200501.li', '20200501.lij', '20200501.lmo', '20200501.ln', '20200501.lo', '20200501.lrc', '20200501.lt', '20200501.ltg', '20200501.lv', '20200501.mai', '20200501.map-bms', '20200501.mdf', '20200501.mg', '20200501.mh', '20200501.mhr', '20200501.mi', '20200501.min', '20200501.mk', '20200501.ml', '20200501.mn', '20200501.mr', '20200501.mrj', '20200501.ms', '20200501.mt', '20200501.mus', '20200501.mwl', 
'20200501.my', '20200501.myv', '20200501.mzn', '20200501.na', '20200501.nah', '20200501.nap', '20200501.nds', '20200501.nds-nl', '20200501.ne', '20200501.new', '20200501.ng', '20200501.nl', '20200501.nn', '20200501.no', '20200501.nov', '20200501.nrm', '20200501.nso', '20200501.nv', '20200501.ny', '20200501.oc', '20200501.olo', '20200501.om', '20200501.or', '20200501.os', '20200501.pa', '20200501.pag', '20200501.pam', '20200501.pap', '20200501.pcd', '20200501.pdc', '20200501.pfl', '20200501.pi', '20200501.pih', '20200501.pl', '20200501.pms', '20200501.pnb', '20200501.pnt', '20200501.ps', '20200501.pt', '20200501.qu', '20200501.rm', '20200501.rmy', '20200501.rn', '20200501.ro', '20200501.roa-rup', '20200501.roa-tara', '20200501.ru', '20200501.rue', '20200501.rw', '20200501.sa', '20200501.sah', '20200501.sat', '20200501.sc', '20200501.scn', '20200501.sco', '20200501.sd', '20200501.se', '20200501.sg', '20200501.sh', '20200501.si', '20200501.simple', '20200501.sk', '20200501.sl', '20200501.sm', '20200501.sn', '20200501.so', '20200501.sq', '20200501.sr', '20200501.srn', '20200501.ss', '20200501.st', '20200501.stq', '20200501.su', '20200501.sv', '20200501.sw', '20200501.szl', '20200501.ta', '20200501.tcy', '20200501.te', '20200501.tet', '20200501.tg', '20200501.th', '20200501.ti', '20200501.tk', '20200501.tl', '20200501.tn', '20200501.to', '20200501.tpi', '20200501.tr', '20200501.ts', '20200501.tt', '20200501.tum', '20200501.tw', '20200501.ty', '20200501.tyv', '20200501.udm', '20200501.ug', '20200501.uk', '20200501.ur', '20200501.uz', '20200501.ve', '20200501.vec', '20200501.vep', '20200501.vi', '20200501.vls', '20200501.vo', '20200501.wa', '20200501.war', '20200501.wo', '20200501.wuu', '20200501.xal', '20200501.xh', '20200501.xmf', '20200501.yi', '20200501.yo', '20200501.za', '20200501.zea', '20200501.zh', '20200501.zh-classical', '20200501.zh-min-nan', '20200501.zh-yue', '20200501.zu']\r\nExample of usage:\r\n\t`load_dataset('wikipedia', '20200501.aa')`\r\n\r\nI think the other parameter is missing in the load_dataset function that is not shown in the instruction.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3292\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3291","id":1056689876,"node_id":"PR_kwDODunzps4urikR","number":3291,"title":"Use f-strings in the dataset 
scripts","user":{"login":"Carlosbogo","id":84228424,"node_id":"MDQ6VXNlcjg0MjI4NDI0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/84228424?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Carlosbogo","html_url":"https:\/\/github.com\/Carlosbogo","followers_url":"https:\/\/api.github.com\/users\/Carlosbogo\/followers","following_url":"https:\/\/api.github.com\/users\/Carlosbogo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Carlosbogo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Carlosbogo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Carlosbogo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Carlosbogo\/orgs","repos_url":"https:\/\/api.github.com\/users\/Carlosbogo\/repos","events_url":"https:\/\/api.github.com\/users\/Carlosbogo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Carlosbogo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-17T22:20:19Z","updated_at":"2021-11-22T16:40:16Z","closed_at":"2021-11-22T16:40:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3291","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3291","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3291.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3291.patch","merged_at":"2021-11-22T16:40:16Z"},"body":"Uses f-strings to format the .py files in the dataset folder","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3291\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3290","id":1056414856,"node_id":"PR_kwDODunzps4uqzcv","number":3290,"title":"Make several audio datasets 
streamable","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-11-17T17:43:41Z","updated_at":"2022-02-01T21:00:52Z","closed_at":"2021-11-19T15:08:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3290","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3290","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3290.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3290.patch","merged_at":"2021-11-19T15:08:57Z"},"body":"Needs https:\/\/github.com\/huggingface\/datasets\/pull\/3129 to be merged first<\/s>\r\n\r\nMake those audio datasets streamable:\r\n- [x] common_voice\r\n- [x] openslr\r\n- [x] vivos\r\n- [x] librispeech_asr (still has some issues to read FLAC)<\/s> *actually it's ok*\r\n- [ ] multilingual_librispeech (yet to be converted)<\/S> *TODO in a separate PR*","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3290\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3289","id":1056323715,"node_id":"PR_kwDODunzps4uqf79","number":3289,"title":"Unpin markdown for build_docs now that it's 
fixed","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-17T16:22:53Z","updated_at":"2021-11-17T16:23:09Z","closed_at":"2021-11-17T16:23:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3289","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3289","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3289.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3289.patch","merged_at":"2021-11-17T16:23:08Z"},"body":"`markdown`'s bug has been fixed, so this PR reverts #3286 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3289\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3288","id":1056145703,"node_id":"PR_kwDODunzps4up6S5","number":3288,"title":"Allow datasets with indices table when concatenating along 
axis=1","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-17T13:41:28Z","updated_at":"2021-11-17T15:41:12Z","closed_at":"2021-11-17T15:41:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3288","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3288","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3288.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3288.patch","merged_at":"2021-11-17T15:41:11Z"},"body":"Calls `flatten_indices` on the datasets with indices table in `concatenate_datasets` to fix issues when concatenating along `axis=1`.\r\n\r\n\r\ncc @lhoestq: I decided to flatten all the datasets instead of flattening all the datasets except the largest one in the end. The latter approach fails on the following example:\r\n```python\r\na = Dataset.from_dict({\"a\": [10, 20, 30, 40]})\r\nb = Dataset.from_dict({\"b\": [10, 20, 30, 40, 50, 60]}) # largest dataset\r\na = a.select([1, 2, 3])\r\nb = b.select([1, 2, 3])\r\nconcatenate_datasets([a, b], axis=1) # fails at line concat_tables(...) because the real length of b's data is 6 and a's length is 3 after flattening (was 4 before flattening)\r\n```\r\n\r\nAlso, it requires additional re-ordering of indices to prepare them for working with the indices table of the largest dataset. IMO not worth when we save only one `flatten_indices` call. 
(feel free to check the code of that approach at https:\/\/github.com\/huggingface\/datasets\/commit\/6acd10481c70950dcfdbfd2bab0bf0c74ad80bcb if you are interested)\r\n\r\nFixes #3273\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3288\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3287","id":1056079724,"node_id":"PR_kwDODunzps4upsWR","number":3287,"title":"Add The Pile dataset and PubMed Central subset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-17T12:35:58Z","updated_at":"2021-12-01T15:29:08Z","closed_at":"2021-12-01T15:29:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3287","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3287","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3287.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3287.patch","merged_at":"2021-12-01T15:29:06Z"},"body":"Add:\r\n- The complete final version of The Pile dataset: \"all\" config\r\n- PubMed Central subset of The Pile: \"pubmed_central\" config\r\n\r\nClose #1675, close bigscience-workshop\/data_tooling#74.\r\n\r\nCC: @StellaAthena, @lewtun ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287\/reactions","total_count":4,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":4,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3287\/timeline","performed_via_github_app":null} 
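To make the indices-table behaviour discussed in #3288 above concrete, here is a minimal sketch, assuming a `datasets` release that supports `axis=1` in `concatenate_datasets` and exposes `Dataset.flatten_indices`. It illustrates why the indices mapping created by `select` needs to be flattened before concatenating column-wise; it is not the exact implementation of the fix.

```python
from datasets import Dataset, concatenate_datasets

a = Dataset.from_dict({"a": [10, 20, 30, 40]})
b = Dataset.from_dict({"b": [10, 20, 30, 40, 50, 60]})

# select() keeps the full underlying table and only records an indices mapping.
a = a.select([1, 2, 3])
b = b.select([1, 2, 3])

# Flattening materializes the selected rows, so both tables really hold 3 rows
# before they are concatenated column-wise.
a, b = a.flatten_indices(), b.flatten_indices()

combined = concatenate_datasets([a, b], axis=1)
print(combined.column_names, len(combined))  # ['a', 'b'] 3
```

After #3288, `concatenate_datasets` performs this flattening itself, so the explicit `flatten_indices` calls above are only for illustration.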
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3286","id":1056008586,"node_id":"PR_kwDODunzps4updTK","number":3286,"title":"Fix build_docs CI","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-17T11:18:56Z","updated_at":"2021-11-17T11:19:20Z","closed_at":"2021-11-17T11:19:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3286","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3286","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3286.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3286.patch","merged_at":"2021-11-17T11:19:19Z"},"body":"Because of https:\/\/github.com\/Python-Markdown\/markdown\/issues\/1196 we have to temporarily pin `markdown` to 3.3.4 for the docs to build without issues","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3286\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3285","id":1055506730,"node_id":"I_kwDODunzps4-6cEq","number":3285,"title":"Add IEMOCAP 
dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-16T22:47:20Z","updated_at":"2021-12-08T11:57:44Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** IEMOCAP\r\n- **Description:** acted, multimodal and multispeaker database\r\n- **Paper:** https:\/\/sail.usc.edu\/iemocap\/Busso_2008_iemocap.pdf\r\n- **Data:** https:\/\/sail.usc.edu\/iemocap\/index.html\r\n- **Motivation:** Useful multimodal dataset\r\n\r\ncc @anton-l \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3285\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3284","id":1055502909,"node_id":"I_kwDODunzps4-6bI9","number":3284,"title":"Add VoxLingua107 
dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"assignees":[{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_
admin":false}],"milestone":null,"comments":1,"created_at":"2021-11-16T22:44:08Z","updated_at":"2021-12-06T09:49:45Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** VoxLingua107\r\n- **Description:** VoxLingua107 is a speech dataset for training spoken language identification models. The dataset consists of short speech segments automatically extracted from YouTube videos and labeled according to the language of the video title and description, with some post-processing steps to filter out false positives.\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2011.12998\r\n- **Data:** http:\/\/bark.phon.ioc.ee\/voxlingua107\/\r\n- **Motivation:** Nice audio classification dataset\r\n\r\ncc @anton-l \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3284\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3283","id":1055495874,"node_id":"I_kwDODunzps4-6ZbC","number":3283,"title":"Add Speech Commands dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"closed","locked":false,"assignee":{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false},"assignees":[{"login":"polinaeterna","id":16348744,"node_id":"MDQ6VXNlcjE2MzQ4NzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16348744?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polinaeterna","html_url":"https:\/\/github.com\/polinaeterna","followers_url":"https:\/\/api.github.com\/users\/polinaeterna\/followers","following_url":"https:\/\/api.github.com\/users\/polinaeterna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polinaeterna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polinaeterna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polinaeterna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polinaeterna\/orgs","repos_url":"https:\/\/api.github.com\/users\/polinaeterna\/repos","events_url":"https:\/\/api.github.com\/users\/polinaeterna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polinaeterna\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-11-16T22:39:56Z","updated_at":"2021-12-10T10:30:15Z","closed_at":"2021-12-10T10:30:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Speech commands\r\n- **Description:** A Dataset for Limited-Vocabulary Speech Recognition\r\n- **Paper:** https:\/\/arxiv.org\/abs\/1804.03209\r\n- **Data:** https:\/\/www.tensorflow.org\/datasets\/catalog\/speech_commands, Available:\r\nhttp:\/\/download.tensorflow.org\/data\/speech_commands_v0.02.tar.gz\r\n- **Motivation:** Nice dataset for audio classification training\r\n\r\ncc @anton-l \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3283\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3282","id":1055054898,"node_id":"I_kwDODunzps4-4twy","number":3282,"title":"ConnectionError: Couldn't reach https:\/\/huggingface.co\/datasets\/oscar-corpus\/OSCAR-2109\/resolve\/main\/OSCAR-2109.py","user":{"login":"MinionAttack","id":10078549,"node_id":"MDQ6VXNlcjEwMDc4NTQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10078549?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MinionAttack","html_url":"https:\/\/github.com\/MinionAttack","followers_url":"https:\/\/api.github.com\/users\/MinionAttack\/followers","following_url":"https:\/\/api.github.com\/users\/MinionAttack\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MinionAttack\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MinionAttack\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MinionAttack\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MinionAttack\/orgs","repos_url":"https:\/\/api.github.com\/users\/MinionAttack\/repos","events_url":"https:\/\/api.github.com\/users\/MinionAttack\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MinionAttack\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-11-16T16:05:19Z","updated_at":"2021-11-29T08:17:29Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*oscar-corpus\/OSCAR-2109*'\r\n\r\n**Link:** *[link to the dataset viewer page](https:\/\/huggingface.co\/datasets\/oscar-corpus\/OSCAR-2109)*\r\n\r\n*The dataset library cannot download any language from the oscar-corpus\/OSCAR-2109 dataset. By entering the URL in your browser I can access the file.*\r\n\r\n```\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/huggingface.co\/datasets\/oscar-corpus\/OSCAR-2109\/resolve\/main\/OSCAR-2109.py\r\n```\r\n\r\nAm I the one who added this dataset ? 
No\r\n\r\nUsing the older version of [OSCAR](https:\/\/huggingface.co\/datasets\/oscar) I don't have any issues downloading languages with the dataset library.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3282\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3281","id":1055018876,"node_id":"PR_kwDODunzps4umWZE","number":3281,"title":"[Datasets] Improve Covost 2","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-11-16T15:32:19Z","updated_at":"2022-01-26T16:17:06Z","closed_at":"2021-11-18T10:44:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3281","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3281","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3281.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3281.patch","merged_at":"2021-11-18T10:44:04Z"},"body":"It's currently quite confusing to understand the manual data download instructions of Covost and not very user-friendly.\r\n\r\nCurrently the user has to:\r\n\r\n1. Go on Common Voice website\r\n2. Find the correct dataset which is **not** mentioned in the error message\r\n3. Download it\r\n4. Untar it\r\n5. Create a language id folder (why? this folder does not exist in the `.tar` downloaded file)\r\n6. pass the folder containing the created language id folder\r\n\r\nThis PR improves this to:\r\n\r\n1. Go on Common Voice website\r\n2. Find the correct dataset which **is** mentioned in the error message\r\n3. Download it\r\n4. Untar it\r\n5. 
pass the untarred folder\r\n\r\n**Note**: This PR is not at all time-critical ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3281\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3280","id":1054766828,"node_id":"PR_kwDODunzps4ulgye","number":3280,"title":"Fix bookcorpusopen RAM usage","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-16T11:27:52Z","updated_at":"2021-11-17T15:53:28Z","closed_at":"2021-11-16T13:34:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3280","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3280","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3280.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3280.patch","merged_at":"2021-11-16T13:34:30Z"},"body":"Each document is a full book, so the default arrow writer batch size of 10,000 is too big, and it can fill up RAM quickly before flushing the first batch on disk. 
I changed its batch size to 256 to use a maximum of 100MB of memory\r\n\r\nFix #3167.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3280\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3279","id":1054711852,"node_id":"PR_kwDODunzps4ulVHe","number":3279,"title":"Minor Typo Fix - Precision to Recall","user":{"login":"SebastinSanty","id":13795788,"node_id":"MDQ6VXNlcjEzNzk1Nzg4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795788?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SebastinSanty","html_url":"https:\/\/github.com\/SebastinSanty","followers_url":"https:\/\/api.github.com\/users\/SebastinSanty\/followers","following_url":"https:\/\/api.github.com\/users\/SebastinSanty\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SebastinSanty\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SebastinSanty\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SebastinSanty\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SebastinSanty\/orgs","repos_url":"https:\/\/api.github.com\/users\/SebastinSanty\/repos","events_url":"https:\/\/api.github.com\/users\/SebastinSanty\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SebastinSanty\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-16T10:32:22Z","updated_at":"2021-11-16T11:18:03Z","closed_at":"2021-11-16T11:18:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3279","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3279","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3279.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3279.patch","merged_at":"2021-11-16T11:18:02Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3279\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3278","id":1054249463,"node_id":"PR_kwDODunzps4uj2EQ","number":3278,"title":"Proposed update to the documentation for WER","user":{"login":"wooters","id":2111202,"node_id":"MDQ6VXNlcjIxMTEyMDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2111202?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/wooters","html_url":"https:\/\/github.com\/wooters","followers_url":"https:\/\/api.github.com\/users\/wooters\/followers","following_url":"https:\/\/api.github.com\/users\/wooters\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/wooters\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/wooters\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/wooters\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/wooters\/orgs","repos_url":"https:\/\/api.github.com\/users\/wooters\/repos","events_url":"https:\/\/api.github.com\/users\/wooters\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/wooters\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-15T23:28:31Z","updated_at":"2021-11-16T11:19:37Z","closed_at":"2021-11-16T11:19:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3278","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3278","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3278.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3278.patch","merged_at":"2021-11-16T11:19:37Z"},"body":"I wanted to submit a minor update to the description of WER for your consideration. 
\r\n\r\nBecause of the possibility of insertions, the numerator in the WER formula can be larger than N, so the value of WER can be greater than 1.0:\r\n\r\n```\r\n>>> from datasets import load_metric\r\n>>> metric = load_metric(\"wer\")\r\n>>> metric.compute(predictions=[\"hello how are you\"], references=[\"hello\"])\r\n3.0\r\n```\r\n\r\nand similarly from the underlying jiwer module's `wer` function:\r\n\r\n```\r\n>>> from jiwer import wer\r\n>>> wer(\"hello\", \"hello how are you\")\r\n3.0\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3278\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3277","id":1054122656,"node_id":"PR_kwDODunzps4ujk11","number":3277,"title":"f-string formatting","user":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-15T21:37:05Z","updated_at":"2021-11-19T20:40:08Z","closed_at":"2021-11-17T16:18:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3277","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3277","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3277.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3277.patch","merged_at":"2021-11-17T16:18:38Z"},"body":"**Fix #3257**\r\n\r\nReplaced _.format()_ and _%_ by f-strings in the following modules : \r\n- [x] **tests**\r\n- [x] **metrics**\r\n- [x] **benchmarks**\r\n- [x] **utils**\r\n- [x] **templates**\r\n- [x] **src\/Datasets\/\\*.py**\r\n\r\nModules in **_src\/Datasets\/_**: \r\n- [x] **commands**\r\n- [x] **features**\r\n- [x] **formatting**\r\n- [x] **io**\r\n- [x] **tasks**\r\n- [x] **utils**\r\n\r\n\r\nModule **datasets** will not be edited as asked by @mariosasko \r\n\r\n-A correction 
of the first PR (#3267)-\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3277\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3276","id":1053793063,"node_id":"PR_kwDODunzps4uihih","number":3276,"title":"Update KILT metadata JSON","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-15T15:25:25Z","updated_at":"2021-11-16T11:21:59Z","closed_at":"2021-11-16T11:21:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3276","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3276","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3276.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3276.patch","merged_at":"2021-11-16T11:21:58Z"},"body":"Fix #3265.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3276\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3275","id":1053698898,"node_id":"PR_kwDODunzps4uiN9t","number":3275,"title":"Force data files extraction if download_mode='force_redownload'","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-15T14:00:24Z","updated_at":"2021-11-15T14:45:23Z","closed_at":"2021-11-15T14:45:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3275","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3275","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3275.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3275.patch","merged_at":"2021-11-15T14:45:23Z"},"body":"Avoids weird issues when redownloading a dataset due to cached data not being fully updated.\r\n\r\nWith this change, issues #3122 and https:\/\/github.com\/huggingface\/datasets\/issues\/2956 (not a fix, but a workaround) can be fixed as follows:\r\n```python\r\ndset = load_dataset(..., download_mode=\"force_redownload\")\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3275\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3274","id":1053689140,"node_id":"PR_kwDODunzps4uiL8-","number":3274,"title":"Fix some contact information 
formats","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-15T13:50:34Z","updated_at":"2021-11-15T14:43:55Z","closed_at":"2021-11-15T14:43:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3274","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3274","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3274.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3274.patch","merged_at":"2021-11-15T14:43:54Z"},"body":"As reported in https:\/\/github.com\/huggingface\/datasets\/issues\/3188 some contact information is not displayed correctly.\r\nThis PR fixes this for CoNLL-2002 and some other datasets with the same issue.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3274\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3273","id":1053554038,"node_id":"I_kwDODunzps4-y_V2","number":3273,"title":"Respect row ordering when concatenating datasets along 
axis=1","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-15T11:27:14Z","updated_at":"2021-11-17T15:41:11Z","closed_at":"2021-11-17T15:41:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, there is a bug when concatenating datasets along `axis=1` if more than one dataset has the `_indices` attribute defined. In that scenario, all indices mappings except the first one get ignored.\r\n\r\nA minimal reproducible example:\r\n```python\r\n>>> from datasets import Dataset, concatenate_datasets\r\n>>> a = Dataset.from_dict({\"a\": [30, 20, 10]})\r\n>>> b = Dataset.from_dict({\"b\": [2, 1, 3]})\r\n>>> d = concatenate_datasets([a.sort(\"a\"), b.sort(\"b\")], axis=1)\r\n>>> print(d[:3]) # expected: {'a': [10, 20, 30], 'b': [1, 2, 3]}\r\n{'a': [10, 20, 30], 'b': [3, 1, 2]}\r\n```\r\n\r\nI've noticed the bug while working on #3195. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3273\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3272","id":1053516479,"node_id":"I_kwDODunzps4-y2K_","number":3272,"title":"Make iter_archive work with ZIP files","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"assignees":[{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-11-15T10:50:42Z","updated_at":"2021-11-25T00:08:47Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently users can use `dl_manager.iter_archive` in their dataset script to iterate over all the files of a TAR archive.\r\nIt would be nice if it could work with ZIP files too !","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3272\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3271","id":1053482919,"node_id":"PR_kwDODunzps4uhgi1","number":3271,"title":"Decode audio from 
remote","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-15T10:25:56Z","updated_at":"2021-11-16T11:35:58Z","closed_at":"2021-11-16T11:35:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3271","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3271","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3271.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3271.patch","merged_at":"2021-11-16T11:35:58Z"},"body":"Currently the Audio feature type can only decode local audio files, not remote files.\r\n\r\nTo fix this I replaced `open` with our `xopen` function, which is compatible with remote files, in audio.py\r\n\r\ncc @albertvillanova @mariosasko ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3271\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3270","id":1053465662,"node_id":"PR_kwDODunzps4uhcxm","number":3270,"title":"Add os.listdir for 
streaming","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-15T10:14:04Z","updated_at":"2021-11-15T10:27:03Z","closed_at":"2021-11-15T10:27:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3270","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3270","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3270.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3270.patch","merged_at":"2021-11-15T10:27:02Z"},"body":"Extend `os.listdir` to support streaming data from remote files. This is often used to navigate in remote ZIP files for example","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3270\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3269","id":1053218769,"node_id":"I_kwDODunzps4-xtfR","number":3269,"title":"coqa 
NonMatchingChecksumError","user":{"login":"ZhaofengWu","id":11954789,"node_id":"MDQ6VXNlcjExOTU0Nzg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11954789?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZhaofengWu","html_url":"https:\/\/github.com\/ZhaofengWu","followers_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/followers","following_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/repos","events_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":18,"created_at":"2021-11-15T05:04:07Z","updated_at":"2022-01-19T13:58:19Z","closed_at":"2022-01-19T13:58:19Z","auth
or_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset(\"coqa\")\r\nDownloading: 3.82kB [00:00, 1.26MB\/s] \r\nDownloading: 1.79kB [00:00, 733kB\/s] \r\nUsing custom data configuration default\r\nDownloading and preparing dataset coqa\/default (download: 55.40 MiB, generated: 18.35 MiB, post-processed: Unknown size, total: 73.75 MiB) to \/Users\/zhaofengw\/.cache\/huggingface\/datasets\/coqa\/default\/1.0.0\/553ce70bfdcd15ff4b5f4abc4fc2f37137139cde1f58f4f60384a53a327716f0...\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 222\/222 [00:00<00:00, 1.38MB\/s]\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 222\/222 [00:00<00:00, 
1.32MB\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2\/2 [00:01<00:00, 1.91it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2\/2 [00:00<00:00, 1117.44it\/s]\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 679, in _download_and_prepare\r\n verify_checksums(\r\n File \"\/Users\/zhaofengw\/miniconda3\/lib\/python3.9\/site-packages\/datasets\/utils\/info_utils.py\", line 40, in verify_checksums\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source 
files:\r\n['https:\/\/nlp.stanford.edu\/data\/coqa\/coqa-train-v1.0.json', 'https:\/\/nlp.stanford.edu\/data\/coqa\/coqa-dev-v1.0.json']\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3269\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3268","id":1052992681,"node_id":"I_kwDODunzps4-w2Sp","number":3268,"title":"Dataset viewer issue for 'liweili\/c4_200m'","user":{"login":"liliwei25","id":22389228,"node_id":"MDQ6VXNlcjIyMzg5MjI4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22389228?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/liliwei25","html_url":"https:\/\/github.com\/liliwei25","followers_url":"https:\/\/api.github.com\/users\/liliwei25\/followers","following_url":"https:\/\/api.github.com\/users\/liliwei25\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/liliwei25\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/liliwei25\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/liliwei25\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/liliwei25\/orgs","repos_url":"https:\/\/api.github.com\/users\/liliwei25\/repos","events_url":"https:\/\/api.github.com\/users\/liliwei25\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/liliwei25\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on 
huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"assignees":[{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-11-14T17:18:46Z","updated_at":"2021-12-21T10:25:20Z","closed_at":"2021-12-21T10:24:51Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*liweili\/c4_200m*'\r\n\r\n**Link:** *[link to the dataset viewer page](https:\/\/huggingface.co\/datasets\/liweili\/c4_200m)*\r\n\r\n*Server Error*\r\n```\r\nStatus code: 404\r\nException: Status404Error\r\nMessage: Not found. Maybe the cache is missing, or maybe the ressource does not exist.\r\n```\r\n\r\nAm I the one who added this dataset ? 
Yes\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3268\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3267","id":1052750084,"node_id":"PR_kwDODunzps4ufQzB","number":3267,"title":"Replacing .format() and % by f-strings","user":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-11-13T19:12:02Z","updated_at":"2021-11-16T21:00:26Z","closed_at":"2021-11-16T14:55:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3267","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3267","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3267.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3267.patch","merged_at":null},"body":"**Fix #3257**\r\n\r\nReplaced _.format()_ and _%_ by f-strings in the following modules : \r\n- [x] **tests**\r\n- [x] **metrics**\r\n- [x] **benchmarks**\r\n- [x] **utils**\r\n- [x] **templates**\r\n\r\nWill follow in the next PR the modules left : \r\n- [ ] **src**\r\n\r\nModule **datasets** will not be edited as asked by @mariosasko \r\n\r\nPS : black and isort applied to files\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3267\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3266","id":1052700155,"node_id":"PR_kwDODunzps4ufH94","number":3266,"title":"Fix URLs for WikiAuto Manual, jeopardy and definite_pronoun_resolution","user":{"login":"LashaO","id":28014149,"node_id":"MDQ6VXNlcjI4MDE0MTQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28014149?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LashaO","html_url":"https:\/\/github.com\/LashaO","followers_url":"https:\/\/api.github.com\/users\/LashaO\/followers","following_url":"https:\/\/api.github.com\/users\/LashaO\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LashaO\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LashaO\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LashaO\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LashaO\/orgs","repos_url":"https:\/\/api.github.com\/users\/LashaO\/repos","events_url":"https:\/\/api.github.com\/users\/LashaO\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LashaO\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-11-13T15:01:34Z","updated_at":"2021-12-06T11:16:31Z","closed_at":"2021-12-06T11:16:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3266","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3266","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3266.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3266.patch","merged_at":"2021-12-06T11:16:31Z"},"body":"[#3264](https:\/\/github.com\/huggingface\/datasets\/issues\/3264)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3266\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3265","id":1052666558,"node_id":"I_kwDODunzps4-vmq-","number":3265,"title":"Checksum error for 
kilt_task_wow","user":{"login":"slyviacassell","id":22296717,"node_id":"MDQ6VXNlcjIyMjk2NzE3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22296717?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slyviacassell","html_url":"https:\/\/github.com\/slyviacassell","followers_url":"https:\/\/api.github.com\/users\/slyviacassell\/followers","following_url":"https:\/\/api.github.com\/users\/slyviacassell\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slyviacassell\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slyviacassell\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slyviacassell\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slyviacassell\/orgs","repos_url":"https:\/\/api.github.com\/users\/slyviacassell\/repos","events_url":"https:\/\/api.github.com\/users\/slyviacassell\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slyviacassell\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-11-13T12:04:17Z","updated_at":"2021-11-16T11:23:53Z","closed_at":"202
1-11-16T11:21:58Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nChecksum failed when downloads kilt_tasks_wow. See error output for details.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\ndatasets.load_datasets('kilt_tasks','wow')\r\n```\r\n\r\n## Expected results\r\nDownload successful\r\n\r\n## Actual results\r\n```\r\nDownloading and preparing dataset kilt_tasks\/wow (download: 72.07 MiB, generated: 61.82 MiB, post-processed: Unknown size, total: 133.89 MiB) to \/root\/.cache\/huggingface\/datasets\/kilt_tasks\/wow\/1.0.0\/57dc8b2431e76637e0c6ef79689ca4af61ed3a330e2e0cd62c8971465a35db3a...\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 3\/3 [00:00<00:00, 5121.25it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 3\/3 [00:00<00:00, 1527.42it\/s]\r\nTraceback (most recent call last):\r\n File \"kilt_wow.py\", line 30, in \r\n main()\r\n File \"kilt_wow.py\", line 27, in main\r\n train, dev, test = dataset.generate_k_shot_data(k=32, seed=seed, path=\"..\/data\/\")\r\n File \"\/workspace\/projects\/CrossFit\/tasks\/fewshot_gym_dataset.py\", line 79, in 
generate_k_shot_data\r\n dataset = self.load_dataset()\r\n File \"kilt_wow.py\", line 21, in load_dataset\r\n return datasets.load_dataset('kilt_tasks','wow')\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 679, in _download_and_prepare\r\n verify_checksums(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 40, in verify_checksums\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['http:\/\/dl.fbaipublicfiles.com\/KILT\/wow-train-kilt.jsonl', 'http:\/\/dl.fbaipublicfiles.com\/KILT\/wow-dev-kilt.jsonl']\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-4.15.0-161-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.3\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3265\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3264","id":1052663513,"node_id":"I_kwDODunzps4-vl7Z","number":3264,"title":"Downloading URL change for WikiAuto Manual, jeopardy and definite_pronoun_resolution","user":{"login":"slyviacassell","id":22296717,"node_id":"MDQ6VXNlcjIyMjk2NzE3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22296717?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slyviacassell","html_url":"https:\/\/github.com\/slyviacassell","followers_url":"https:\/\/api.github.com\/users\/slyviacassell\/followers","following_url":"https:\/\/api.github.com\/users\/slyviacassell\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slyviacassell\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slyviacassell\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slyviacassell\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slyviacassell\/orgs","repos_url":"https:\/\/api.github.com\/users\/slyviacassell\/repos","events_url":"https:\/\/api.github.com\/users\/slyviacassell\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slyviacassell\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-11-13T11:47:12Z","updated_at":"2021-11-13T13:39:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n- WikiAuto Manual \r\nThe original manual datasets with the following downloading URL in this [repository](https:\/\/github.com\/chaojiang06\/wiki-auto) was [deleted](https:\/\/github.com\/chaojiang06\/wiki-auto\/commit\/0af9b066f2b4e02726fb8a9be49283c0ad25367f) by the author. \r\n```\r\nhttps:\/\/github.com\/chaojiang06\/wiki-auto\/raw\/master\/wiki-manual\/train.tsv\r\n```\r\n\r\n- jeopardy \r\nThe downloading URL for jeopardy may move from \r\n```\r\nhttp:\/\/skeeto.s3.amazonaws.com\/share\/JEOPARDY_QUESTIONS1.json.gz\r\n```\r\n to \r\n```\r\nhttps:\/\/drive.google.com\/file\/d\/0BwT5wj_P7BKXb2hfM3d2RHU1ckE\/view?resourcekey=0-1abK4cJq-mqxFoSg86ieIg\r\n```\r\n\r\n- definite_pronoun_resolution\r\nThe following downloading URL for definite_pronoun_resolution cannot be reached for some reasons.\r\n```\r\nhttp:\/\/www.hlt.utdallas.edu\/~vince\/data\/emnlp12\/train.c.txt\r\n```\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\ndatasets.load_datasets('wiki_auto','manual')\r\ndatasets.load_datasets('jeopardy')\r\ndatasets.load_datasets('definite_pronoun_resolution')\r\n```\r\n\r\n## Expected results\r\nDownload successfully \r\n\r\n## Actual results\r\n- WikiAuto Manual \r\n```\r\nDownloading and preparing dataset wiki_auto\/manual (download: 151.65 MiB, generated: 155.97 MiB, post-processed: Unknown size, total: 307.61 MiB) to \/root\/.cache\/huggingface\/datasets\/wiki_auto\/manual\/1.0.0\/5ffdd9fc62422d29bd02675fb9606f77c1251ee17169ac10b143ce07ef2f4db8...\r\n 0%| | 0\/3 [00:00\r\n main()\r\n File \"wiki_auto.py\", line 40, in main\r\n train, dev, test = dataset.generate_k_shot_data(k=16, seed=seed, path=\"..\/data\/\")\r\n File \"\/workspace\/projects\/CrossFit\/tasks\/fewshot_gym_dataset.py\", line 24, in generate_k_shot_data\r\n dataset = self.load_dataset()\r\n File \"wiki_auto.py\", line 34, in load_dataset\r\n return datasets.load_dataset('wiki_auto', 'manual')\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wiki_auto\/5ffdd9fc62422d29bd02675fb9606f77c1251ee17169ac10b143ce07ef2f4db8\/wiki_auto.py\", line 193, in _split_generators\r\n data_dir = dl_manager.download_and_extract(my_urls)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 196, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 216, in map_nested\r\n mapped = [\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 217, in \r\n 
_single_map_nested((function, obj, types, None, True))\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 152, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 295, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 592, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/github.com\/chaojiang06\/wiki-auto\/raw\/master\/wiki-manual\/train.tsv\r\n```\r\n- jeopardy\r\n```\r\nUsing custom data configuration default\r\nDownloading and preparing dataset jeopardy\/default (download: 12.13 MiB, generated: 34.46 MiB, post-processed: Unknown size, total: 46.59 MiB) to \/root\/.cache\/huggingface\/datasets\/jeopardy\/default\/0.1.0\/25ee3e4a73755e637b8810f6493fd36e4523dea3ca8a540529d0a6e24c7f9810...\r\nTraceback (most recent call last):\r\n File \"jeopardy.py\", line 45, in \r\n main()\r\n File \"jeopardy.py\", line 42, in main\r\n train, dev, test = dataset.generate_k_shot_data(k=32, seed=seed, path=\"..\/data\/\")\r\n File \"\/workspace\/projects\/CrossFit\/tasks\/fewshot_gym_dataset.py\", line 79, in generate_k_shot_data\r\n dataset = self.load_dataset()\r\n File \"jeopardy.py\", line 36, in load_dataset\r\n return datasets.load_dataset(\"jeopardy\")\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/jeopardy\/25ee3e4a73755e637b8810f6493fd36e4523dea3ca8a540529d0a6e24c7f9810\/jeopardy.py\", line 72, in _split_generators\r\n filepath = dl_manager.download_and_extract(_DATA_URL)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 196, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 206, in map_nested\r\n return function(data_struct)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 295, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 594, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/skeeto.s3.amazonaws.com\/share\/JEOPARDY_QUESTIONS1.json.gz\r\n```\r\n- 
definite_pronoun_resolution\r\n```\r\nDownloading and preparing dataset definite_pronoun_resolution\/plain_text (download: 222.12 KiB, generated: 239.12 KiB, post-processed: Unknown size, total: 461.24 KiB) to \/root\/.cache\/huggingface\/datasets\/definite_pronoun_resolution\/plain_text\/1.0.0\/35a1dfd4fba4afb8ba226cbbb65ac7cef0dd3cf9302d8f803740f05d2f16ceff...\r\n 0%| | 0\/2 [00:00\r\n main()\r\n File \"definite_pronoun_resolution.py\", line 34, in main\r\n train, dev, test = dataset.generate_k_shot_data(k=32, seed=seed, path=\"..\/data\/\")\r\n File \"\/workspace\/projects\/CrossFit\/tasks\/fewshot_gym_dataset.py\", line 79, in generate_k_shot_data\r\n dataset = self.load_dataset()\r\n File \"definite_pronoun_resolution.py\", line 28, in load_dataset\r\n return datasets.load_dataset('definite_pronoun_resolution')\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/definite_pronoun_resolution\/35a1dfd4fba4afb8ba226cbbb65ac7cef0dd3cf9302d8f803740f05d2f16ceff\/definite_pronoun_resolution.py\", line 76, in _split_generators\r\n files = dl_manager.download_and_extract(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 196, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 216, in map_nested\r\n mapped = [\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 217, in \r\n _single_map_nested((function, obj, types, None, True))\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 152, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 295, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 594, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/www.hlt.utdallas.edu\/~vince\/data\/emnlp12\/train.c.txt\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-4.15.0-161-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.3\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3264\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3263","id":1052552516,"node_id":"I_kwDODunzps4-vK1E","number":3263,"title":"FET DATA","user":{"login":"FStell01","id":90987031,"node_id":"MDQ6VXNlcjkwOTg3MDMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90987031?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/FStell01","html_url":"https:\/\/github.com\/FStell01","followers_url":"https:\/\/api.github.com\/users\/FStell01\/followers","following_url":"https:\/\/api.github.com\/users\/FStell01\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/FStell01\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/FStell01\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/FStell01\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/FStell01\/orgs","repos_url":"https:\/\/api.github.com\/users\/FStell01\/repos","events_url":"https:\/\/api.github.com\/users\/FStell01\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/FStell01\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-13T05:46:06Z","updated_at":"2021-11-13T13:31:47Z","closed_at":"2021-11-13T13:31:47Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3263\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3262","id":1052455082,"node_id":"PR_kwDODunzps4uej4t","number":3262,"title":"asserts replaced with exception for image 
classification task, csv, json","user":{"login":"manisnesan","id":153142,"node_id":"MDQ6VXNlcjE1MzE0Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/153142?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manisnesan","html_url":"https:\/\/github.com\/manisnesan","followers_url":"https:\/\/api.github.com\/users\/manisnesan\/followers","following_url":"https:\/\/api.github.com\/users\/manisnesan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manisnesan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manisnesan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manisnesan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manisnesan\/orgs","repos_url":"https:\/\/api.github.com\/users\/manisnesan\/repos","events_url":"https:\/\/api.github.com\/users\/manisnesan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manisnesan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-12T22:34:59Z","updated_at":"2021-11-15T11:08:37Z","closed_at":"2021-11-15T11:08:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3262","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3262","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3262.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3262.patch","merged_at":"2021-11-15T11:08:37Z"},"body":"Fixes for csv, json in io module and image_classification task with tests referenced in https:\/\/github.com\/huggingface\/datasets\/issues\/3171","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3262\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3261","id":1052346381,"node_id":"I_kwDODunzps4-uYgN","number":3261,"title":"Scifi_TV_Shows: Having trouble getting viewer to find appropriate 
files","user":{"login":"lara-martin","id":37913218,"node_id":"MDQ6VXNlcjM3OTEzMjE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37913218?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lara-martin","html_url":"https:\/\/github.com\/lara-martin","followers_url":"https:\/\/api.github.com\/users\/lara-martin\/followers","following_url":"https:\/\/api.github.com\/users\/lara-martin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lara-martin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lara-martin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lara-martin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lara-martin\/orgs","repos_url":"https:\/\/api.github.com\/users\/lara-martin\/repos","events_url":"https:\/\/api.github.com\/users\/lara-martin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lara-martin\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-11-12T19:25:19Z","updated_at":"2021-12-21T10:24:10Z","closed_at":"2021-12-21T10:24:10Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*Science Fiction TV Show Plots Corpus (Scifi_TV_Shows)*'\r\n\r\n**Link:** [link](https:\/\/huggingface.co\/datasets\/lara-martin\/Scifi_TV_Shows)\r\n\r\nI tried adding both a script (https:\/\/huggingface.co\/datasets\/lara-martin\/Scifi_TV_Shows\/blob\/main\/Scifi_TV_Shows.py) and some dummy examples (https:\/\/huggingface.co\/datasets\/lara-martin\/Scifi_TV_Shows\/tree\/main\/dummy), but the viewer still has a 404 error (\"Not found. Maybe the cache is missing, or maybe the ressource does not exist.\"). I'm not sure what to try next. Thanks in advance!\r\n\r\nAm I the one who added this dataset? 
Yes\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3261\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3260","id":1052247373,"node_id":"PR_kwDODunzps4ueCIU","number":3260,"title":"Fix ConnectionError in Scielo dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-12T18:02:37Z","updated_at":"2021-11-16T18:18:17Z","closed_at":"2021-11-16T17:55:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3260","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3260","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3260.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3260.patch","merged_at":"2021-11-16T17:55:22Z"},"body":"This PR:\r\n* allows 403 status code in HEAD requests to S3 buckets to fix the connection error in the Scielo dataset (instead of `url`, uses `response.url` to check the URL of the final endpoint)\r\n* makes the Scielo dataset streamable\r\n\r\nFixes #3255. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3260\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3259","id":1052189775,"node_id":"PR_kwDODunzps4ud5W3","number":3259,"title":"Updating details of IRC disentanglement data","user":{"login":"jkkummerfeld","id":1298052,"node_id":"MDQ6VXNlcjEyOTgwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1298052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jkkummerfeld","html_url":"https:\/\/github.com\/jkkummerfeld","followers_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/followers","following_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/orgs","repos_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/repos","events_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jkkummerfeld\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-12T17:16:58Z","updated_at":"2021-11-18T17:19:33Z","closed_at":"2021-11-18T17:19:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3259","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3259","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3259.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3259.patch","merged_at":"2021-11-18T17:19:33Z"},"body":"I was pleasantly surprised to find that someone had already added my dataset to the huggingface library, but some details were missing or incorrect. 
This PR fixes the documentation.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3259\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3258","id":1052188195,"node_id":"I_kwDODunzps4-tx4j","number":3258,"title":"Reload dataset that was already downloaded with `load_from_disk` from cloud storage","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-12T17:14:59Z","updated_at":"2021-11-12T17:14:59Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`load_from_disk` downloads the dataset to a temporary directory without checking if the dataset has already been downloaded once.\r\nIt would be nice to have some sort of caching for datasets downloaded this way. 
This could leverage the fingerprint of the dataset that was saved in the `state.json` file.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3258\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3257","id":1052118365,"node_id":"I_kwDODunzps4-tg1d","number":3257,"title":"Use f-strings for string formatting ","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for 
newcomers"}],"state":"closed","locked":false,"assignee":{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false},"assignees":[{"login":"Mehdi2402","id":56029953,"node_id":"MDQ6VXNlcjU2MDI5OTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56029953?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehdi2402","html_url":"https:\/\/github.com\/Mehdi2402","followers_url":"https:\/\/api.github.com\/users\/Mehdi2402\/followers","following_url":"https:\/\/api.github.com\/users\/Mehdi2402\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehdi2402\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehdi2402\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehdi2402\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehdi2402\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehdi2402\/repos","events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehdi2402\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-11-12T16:02:15Z","updated_at":"2021-11-17T16:18:38Z","closed_at":"2021-11-17T16:18:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"f-strings offer better readability\/performance than `str.format` and `%`, so we should use them in all places in our codebase unless there is good reason to keep the older syntax.\r\n\r\n> **NOTE FOR CONTRIBUTORS**: To avoid large PRs and possible merge conflicts, do 1-3 modules per PR. 
Also, feel free to ignore the files located under `datasets\/*`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3257\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3256","id":1052000613,"node_id":"PR_kwDODunzps4udTqg","number":3256,"title":"asserts replaced by exception for text classification task with test.","user":{"login":"manisnesan","id":153142,"node_id":"MDQ6VXNlcjE1MzE0Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/153142?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manisnesan","html_url":"https:\/\/github.com\/manisnesan","followers_url":"https:\/\/api.github.com\/users\/manisnesan\/followers","following_url":"https:\/\/api.github.com\/users\/manisnesan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manisnesan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manisnesan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manisnesan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manisnesan\/orgs","repos_url":"https:\/\/api.github.com\/users\/manisnesan\/repos","events_url":"https:\/\/api.github.com\/users\/manisnesan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manisnesan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-11-12T14:05:36Z","updated_at":"2021-11-12T15:09:33Z","closed_at":"2021-11-12T14:59:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3256","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3256","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3256.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3256.patch","merged_at":"2021-11-12T14:59:32Z"},"body":"I have replaced only a single assert in text_classification.py along with a unit test to verify an exception is raised based on https:\/\/github.com\/huggingface\/datasets\/issues\/3171 . \r\n\r\nI would like to first understand the code contribution workflow. So keeping the change to a single file rather than making too many changes. Once this gets approved, I will look into the rest. \r\n\r\nThanks. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3256\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3255","id":1051783129,"node_id":"I_kwDODunzps4-sO_Z","number":3255,"title":"SciELO dataset ConnectionError","user":{"login":"WojciechKusa","id":2575047,"node_id":"MDQ6VXNlcjI1NzUwNDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2575047?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/WojciechKusa","html_url":"https:\/\/github.com\/WojciechKusa","followers_url":"https:\/\/api.github.com\/users\/WojciechKusa\/followers","following_url":"https:\/\/api.github.com\/users\/WojciechKusa\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/WojciechKusa\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/WojciechKusa\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/WojciechKusa\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/WojciechKusa\/orgs","repos_url":"https:\/\/api.github.com\/users\/WojciechKusa\/repos","events_url":"https:\/\/api.github.com\/users\/WojciechKusa\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/WojciechKusa\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-11-12T09:57:14Z","updated_at":"2021-11-16T17:55:22Z","closed_at":"2021-11-16T17:55:22Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI get `ConnectionError` when I am trying to load the SciELO dataset. \r\n\r\n\r\nWhen I try the URL with `requests` I get:\r\n```\r\n>>> requests.head(\"https:\/\/ndownloader.figstatic.com\/files\/14019287\")\r\n\r\n```\r\nAnd as far as I understand redirections in `datasets` are not supported for downloads. 
\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/807341d0db0728073ab605c812c67f927d148f38\/datasets\/scielo\/scielo.py#L45 \r\n\r\n\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"scielo\", \"en-es\")\r\n```\r\n\r\n## Expected results\r\nDownload SciELO dataset and load Dataset object\r\n\r\n\r\n## Actual results\r\n\r\n```\r\nDownloading and preparing dataset scielo\/en-es (download: 21.90 MiB, generated: 68.45 MiB, post-processed: Unknown size, total: 90.35 MiB) to \/Users\/test\/.cache\/huggingface\/datasets\/scielo\/en-es\/1.0.0\/7e05d55a20257efeb9925ff5de65bd4884fc6ddb6d765f1ea3e8860449d90e0e...\r\nTraceback (most recent call last):\r\n File \"scielo.py\", line 3, in \r\n dataset = load_dataset(\"scielo\", \"en-es\")\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/Users\/test\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/scielo\/7e05d55a20257efeb9925ff5de65bd4884fc6ddb6d765f1ea3e8860449d90e0e\/scielo.py\", line 77, in _split_generators\r\n data_dir = dl_manager.download_and_extract(_URLS[self.config.name])\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 196, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 206, in map_nested\r\n return function(data_struct)\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 217, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 295, in cached_path\r\n output_path = get_from_cache(\r\n File \"..\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 594, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/ndownloader.figstatic.com\/files\/14019287\r\n\r\n```\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.12\r\n- PyArrow version: 6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3255\/timeline","performed_via_github_app":null} 
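The SciELO report above comes down to the figstatic URL answering with a redirect that the downloader did not follow at the time. A minimal sketch for checking that with plain `requests`, assuming only the URL quoted in the issue (the variable names and timeout are illustrative, not from the report):

```python
# Hedged sketch: confirm the figstatic URL is a redirect rather than a dead link.
# Only the URL comes from the issue above; everything else is illustrative.
import requests

url = "https://ndownloader.figstatic.com/files/14019287"

# HEAD without following redirects mirrors what the reporter saw:
# an intermediate 3xx response instead of the file itself.
first_hop = requests.head(url, allow_redirects=False, timeout=30)
print(first_hop.status_code, first_hop.headers.get("Location"))

# Following redirects resolves the final host, which points at redirect
# handling (not a missing file) as the likely cause of the ConnectionError.
final = requests.head(url, allow_redirects=True, timeout=30)
print(final.status_code, final.url)
```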
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3254","id":1051351172,"node_id":"PR_kwDODunzps4ubPwR","number":3254,"title":"Update xcopa dataset (fix checksum issues + add translated data)","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-11T20:51:33Z","updated_at":"2021-11-12T10:30:58Z","closed_at":"2021-11-12T10:30:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3254","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3254","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3254.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3254.patch","merged_at":"2021-11-12T10:30:57Z"},"body":"This PR updates the checksums (as reported [here](https:\/\/discuss.huggingface.co\/t\/how-to-load-dataset-locally\/11601\/2)) of the `xcopa` dataset. Additionally, it adds new configs that hold the translated data of the original set of configs. 
This data was not available at the time of adding this dataset to the lib.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3254\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3253","id":1051308972,"node_id":"I_kwDODunzps4-qbOs","number":3253,"title":"`GeneratorBasedBuilder` does not support `None` values","user":{"login":"pavel-lexyr","id":69010336,"node_id":"MDQ6VXNlcjY5MDEwMzM2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69010336?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pavel-lexyr","html_url":"https:\/\/github.com\/pavel-lexyr","followers_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/followers","following_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/orgs","repos_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/repos","events_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pavel-lexyr\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-11T19:51:21Z","updated_at":"2021-12-09T14:26:58Z","closed_at":"2021-12-09T14:26:58Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n`GeneratorBasedBuilder` does not support `None` values.\r\n\r\n## Steps to reproduce the bug\r\nSee [this repository](https:\/\/github.com\/pavel-lexyr\/huggingface-datasets-bug-reproduction) for minimal reproduction.\r\n\r\n## Expected results\r\nDataset is initialized with a `None` value in the `value` column.\r\n\r\n## Actual results\r\n```\r\nTraceback (most recent call last):\r\n File \"main.py\", line 3, in \r\n datasets.load_dataset(\".\/bad-data\")\r\n File \"...\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"...\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"...\/datasets\/builder.py\", line 697, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"...\/datasets\/builder.py\", line 1103, in _prepare_split\r\n example = self.info.features.encode_example(record)\r\n File 
\"...\/datasets\/features\/features.py\", line 1033, in encode_example\r\n return encode_nested_example(self, example)\r\n File \"...\/datasets\/features\/features.py\", line 808, in encode_nested_example\r\n return {\r\n File \"...\/datasets\/features\/features.py\", line 809, in \r\n k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n File \"...\/datasets\/features\/features.py\", line 855, in encode_nested_example\r\n return schema.encode_example(obj)\r\n File \"...\/datasets\/features\/features.py\", line 299, in encode_example\r\n return float(value)\r\nTypeError: float() argument must be a string or a number, not 'NoneType'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-5.4.0-81-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 6.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3253\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3252","id":1051124749,"node_id":"PR_kwDODunzps4uagoy","number":3252,"title":"Fix failing CER metric test in CI after update","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-11T15:57:16Z","updated_at":"2021-11-12T14:06:44Z","closed_at":"2021-11-12T14:06:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3252","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3252","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3252.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3252.patch","merged_at":"2021-11-12T14:06:43Z"},"body":"Fixes the [failing CER metric 
test](https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8644\/workflows\/79816553-fa2f-4756-b022-d5937f00bf7b\/jobs\/53298) in CI by adding support for `jiwer==2.3.0`, which was released yesterday. Also, I verified that all the tests in `metrics\/cer\/test_cer.py` pass after the change, so the results should be the same irrespective of the `jiwer` version.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3252\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3250","id":1050541348,"node_id":"PR_kwDODunzps4uYmkr","number":3250,"title":"Add ETHICS dataset","user":{"login":"ssss1029","id":7088559,"node_id":"MDQ6VXNlcjcwODg1NTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7088559?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ssss1029","html_url":"https:\/\/github.com\/ssss1029","followers_url":"https:\/\/api.github.com\/users\/ssss1029\/followers","following_url":"https:\/\/api.github.com\/users\/ssss1029\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ssss1029\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ssss1029\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ssss1029\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ssss1029\/orgs","repos_url":"https:\/\/api.github.com\/users\/ssss1029\/repos","events_url":"https:\/\/api.github.com\/users\/ssss1029\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ssss1029\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-11T03:45:34Z","updated_at":"2021-11-16T18:32:25Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3250","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3250","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3250.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3250.patch","merged_at":null},"body":"This PR adds the ETHICS dataset, including all 5 sub-datasets.\r\nFrom https:\/\/arxiv.org\/abs\/2008.02275","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3250\/timeline","performed_via_github_app":null} 
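For context on the CER fix above, a hedged sketch of how the bundled metric is typically exercised; the example strings are made up, and `load_metric("cer")` assumes a `datasets` 1.x install with `jiwer` available locally:

```python
# Hedged sketch: sanity-check the "cer" metric after a jiwer upgrade.
# The strings below are illustrative; only the metric name comes from the PR above.
from datasets import load_metric

cer_metric = load_metric("cer")

predictions = ["hello world", "good night moon"]
references = ["hello duck", "good night moon"]

# Character Error Rate = (substitutions + insertions + deletions)
# divided by the number of characters in the references.
score = cer_metric.compute(predictions=predictions, references=references)

# The point of the PR's test run: this number should come out identical
# before and after bumping jiwer to 2.3.0.
print(score)
```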
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3249","id":1050193138,"node_id":"PR_kwDODunzps4uXeea","number":3249,"title":"Fix streaming for id_newspapers_2018","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-10T18:55:30Z","updated_at":"2021-11-12T14:01:32Z","closed_at":"2021-11-12T14:01:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3249","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3249","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3249.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3249.patch","merged_at":"2021-11-12T14:01:31Z"},"body":"To be compatible with streaming, this dataset must use `dl_manager.iter_archive` since the data are in a .tgz file","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3249\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3248","id":1050171082,"node_id":"PR_kwDODunzps4uXZzU","number":3248,"title":"Stream from Google Drive and other 
hosts","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-11-10T18:32:32Z","updated_at":"2021-11-30T16:03:43Z","closed_at":"2021-11-12T17:18:11Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3248","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3248","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3248.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3248.patch","merged_at":"2021-11-12T17:18:10Z"},"body":"Streaming from Google Drive is a bit more challenging than the other host we've been supporting:\r\n- the download URL must be updated to add the confirm token obtained by HEAD request\r\n- it requires to use cookies to keep the connection alive\r\n- the URL doesn't tell any information about whether the file is compressed or not\r\n\r\nTherefore I did two things:\r\n- I added a step for URL and headers\/cookies preparation in the StreamingDownloadManager\r\n- I added automatic compression type inference by reading the [magic number](https:\/\/en.wikipedia.org\/wiki\/List_of_file_signatures)\r\n\r\nThis allows to do do fancy things like\r\n```python\r\nfrom datasets.utils.streaming_download_manager import StreamingDownloadManager, xopen, xjoin, xglob\r\n\r\n# zip file containing a train.tsv file\r\nurl = \"https:\/\/drive.google.com\/uc?export=download&id=1k92sUfpHxKq8PXWRr7Y5aNHXwOCNUmqh\"\r\n\r\nextracted = StreamingDownloadManager().download_and_extract(url)\r\nfor inner_file in xglob(xjoin(extracted, \"*.tsv\")):\r\n with xopen(inner_file) as f:\r\n # streaming starts here\r\n for line in f:\r\n print(line)\r\n```\r\n\r\nThis should make around 80 datasets streamable. It concerns those hosted on Google Drive but also any dataset for which the URL doesn't give any information about compression. 
Here is the full list:\r\n\r\n```\r\namazon_polarity, ami, arabic_billion_words, ascent_kb, asset, big_patent, billsum, capes, cmrc2018, cnn_dailymail,\r\ncode_x_glue_cc_code_completion_token, code_x_glue_cc_code_refinement, code_x_glue_cc_code_to_code_trans,\r\ncode_x_glue_tt_text_to_text, conll2002, craigslist_bargains, dbpedia_14, docred, ehealth_kd, emo, euronews, germeval_14,\r\ngigaword, grail_qa, great_code, has_part, head_qa, health_fact, hope_edi, id_newspapers_2018,\r\nigbo_english_machine_translation, irc_disentangle, jfleg, jnlpba, journalists_questions, kor_ner, linnaeus, med_hop, mrqa,\r\nmt_eng_vietnamese, multi_news, norwegian_ner, offcombr, offenseval_dravidian, para_pat, peoples_daily_ner, pn_summary,\r\npoleval2019_mt, pubmed_qa, qangaroo, reddit_tifu, refresd, ro_sts_parallel, russian_super_glue, samsum, sberquad, scielo,\r\nsearch_qa, species_800, spider, squad_adversarial, tamilmixsentiment, tashkeela, ted_talks_iwslt, trec, turk, turkish_ner,\r\ntwi_text_c3, universal_morphologies, web_of_science, weibo_ner, wiki_bio, wiki_hop, wiki_lingua, wiki_summary, wili_2018,\r\nwisesight1000, wnut_17, yahoo_answers_topics, yelp_review_full, yoruba_text_c3\r\n```\r\n\r\nSome of them may not work if the host doesn't support HTTP range requests for example\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2742\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/3188","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3248\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3247","id":1049699088,"node_id":"I_kwDODunzps4-kSMQ","number":3247,"title":"Loading big json dataset raises 
pyarrow.lib.ArrowNotImplementedError","user":{"login":"maxzirps","id":29249513,"node_id":"MDQ6VXNlcjI5MjQ5NTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29249513?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxzirps","html_url":"https:\/\/github.com\/maxzirps","followers_url":"https:\/\/api.github.com\/users\/maxzirps\/followers","following_url":"https:\/\/api.github.com\/users\/maxzirps\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxzirps\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxzirps\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxzirps\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxzirps\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxzirps\/repos","events_url":"https:\/\/api.github.com\/users\/maxzirps\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxzirps\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-11-10T11:17:59Z","updated_at":"2021-11-12T10:25:53Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen trying to create a dataset from a json file with around 25MB, the following error is raised `pyarrow.lib.ArrowNotImplementedError: Unsupported cast from struct to struct using function cast_struct`\r\n\r\nSplitting the big file into smaller ones and then loading it with the `load_dataset` method did also not work.\r\n\r\nCreating a pandas dataframe from it and then loading it with `Dataset.from_pandas` works\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset(\"json\", data_files=\"test.json\")\r\n```\r\n\r\ntest.json ~25MB\r\n```json\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n{\"a\": {\"b\": 7, \"c\": 6}}\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n{\"a\": {\"b\": 7, \"c\": 6}}\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n...\r\n```\r\n\r\nworking.json ~160bytes\r\n```json\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n{\"a\": {\"b\": 7, \"c\": 6}}\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n{\"a\": {\"b\": 7, \"c\": 6}}\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n```\r\n\r\n## Expected results\r\nIt should load the dataset from the json file without error.\r\n\r\n## Actual results\r\nIt raises Exception `pyarrow.lib.ArrowNotImplementedError: Unsupported cast from struct to struct using function cast_struct`\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\/Users\/m\/workspace\/xxx\/project\/main.py\", line 60, in \r\n dataset = load_dataset(\"json\", data_files=\"result.json\")\r\n File \"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/datasets\/load.py\", line 1627, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 697, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File 
\"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 1159, in _prepare_split\r\n writer.write_table(table)\r\n File \"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/datasets\/arrow_writer.py\", line 428, in write_table\r\n pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n File \"pyarrow\/table.pxi\", line 1685, in pyarrow.lib.Table.from_arrays\r\n File \"pyarrow\/table.pxi\", line 630, in pyarrow.lib._sanitize_arrays\r\n File \"pyarrow\/array.pxi\", line 338, in pyarrow.lib.asarray\r\n File \"pyarrow\/table.pxi\", line 304, in pyarrow.lib.ChunkedArray.cast\r\n File \"\/opt\/homebrew\/Caskroom\/miniforge\/base\/envs\/xxx\/lib\/python3.9\/site-packages\/pyarrow\/compute.py\", line 309, in cast\r\n return call_function(\"cast\", [arr], options)\r\n File \"pyarrow\/_compute.pyx\", line 528, in pyarrow._compute.call_function\r\n File \"pyarrow\/_compute.pyx\", line 327, in pyarrow._compute.Function.call\r\n File \"pyarrow\/error.pxi\", line 143, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 120, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowNotImplementedError: Unsupported cast from struct to struct using function cast_struct\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.14.0\r\n- Platform: macOS-12.0.1-arm64-arm-64bit\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3247\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3246","id":1049662746,"node_id":"PR_kwDODunzps4uVvaW","number":3246,"title":"[tiny] fix typo in stream 
docs","user":{"login":"nollied","id":26421036,"node_id":"MDQ6VXNlcjI2NDIxMDM2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26421036?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nollied","html_url":"https:\/\/github.com\/nollied","followers_url":"https:\/\/api.github.com\/users\/nollied\/followers","following_url":"https:\/\/api.github.com\/users\/nollied\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nollied\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nollied\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nollied\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nollied\/orgs","repos_url":"https:\/\/api.github.com\/users\/nollied\/repos","events_url":"https:\/\/api.github.com\/users\/nollied\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nollied\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-10T10:40:02Z","updated_at":"2021-11-10T11:10:39Z","closed_at":"2021-11-10T11:10:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3246","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3246","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3246.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3246.patch","merged_at":"2021-11-10T11:10:39Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3246\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3245","id":1048726062,"node_id":"PR_kwDODunzps4uSqqq","number":3245,"title":"Fix load_from_disk temporary 
directory","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-09T15:15:15Z","updated_at":"2021-11-09T15:30:52Z","closed_at":"2021-11-09T15:30:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3245","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3245","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3245.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3245.patch","merged_at":"2021-11-09T15:30:51Z"},"body":"`load_from_disk` uses `tempfile.TemporaryDirectory()` instead of our `get_temporary_cache_files_directory()` function. This can cause the temporary directory to be deleted before the dataset object is garbage collected.\r\n\r\nIn practice, it prevents anyone from using methods like `shuffle` on a dataset loaded this way, because it can't write the shuffled indices in a directory that doesn't exist anymore.\r\n\r\nIn this PR I switch to using `get_temporary_cache_files_directory()` and I update the tests.\r\n\r\ncc @mariosasko since you worked on `get_temporary_cache_files_directory()`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3245\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3244","id":1048675741,"node_id":"PR_kwDODunzps4uSgG5","number":3244,"title":"Fix filter method for 
batched=True","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-09T14:30:59Z","updated_at":"2021-11-09T15:52:58Z","closed_at":"2021-11-09T15:52:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3244","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3244","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3244.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3244.patch","merged_at":"2021-11-09T15:52:57Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3244\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3243","id":1048630754,"node_id":"PR_kwDODunzps4uSWtB","number":3243,"title":"Remove redundant isort module 
placement","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-09T13:50:30Z","updated_at":"2021-11-12T14:02:45Z","closed_at":"2021-11-12T14:02:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3243","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3243","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3243.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3243.patch","merged_at":"2021-11-12T14:02:45Z"},"body":"`isort` can place modules by itself from [version 5.0.0](https:\/\/pycqa.github.io\/isort\/docs\/upgrade_guides\/5.0.0.html#module-placement-changes-known_third_party-known_first_party-default_section-etc) onwards, making the `known_first_party` and `known_third_party` fields in `setup.cfg` redundant (this is why our CI works, even though we haven't touched these options in a while).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3243\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3242","id":1048527232,"node_id":"I_kwDODunzps4-f0GA","number":3242,"title":"Adding ANERcorp-CAMeLLab 
dataset","user":{"login":"vitalyshalumov","id":33824221,"node_id":"MDQ6VXNlcjMzODI0MjIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33824221?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vitalyshalumov","html_url":"https:\/\/github.com\/vitalyshalumov","followers_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/followers","following_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/orgs","repos_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/repos","events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-09T12:04:04Z","updated_at":"2021-11-09T12:41:15Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3242\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3241","id":1048461852,"node_id":"PR_kwDODunzps4uRzHa","number":3241,"title":"Swap descriptions of v1 and raw-v1 configs of WikiText dataset and fix 
metadata","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-09T10:54:15Z","updated_at":"2021-11-09T13:49:29Z","closed_at":"2021-11-09T13:49:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3241","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3241","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3241.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3241.patch","merged_at":"2021-11-09T13:49:28Z"},"body":"Fix #3237.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3241\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3240","id":1048376021,"node_id":"I_kwDODunzps4-fPLV","number":3240,"title":"Couldn't reach data file for 
disaster_response_messages","user":{"login":"pandya6988","id":81331791,"node_id":"MDQ6VXNlcjgxMzMxNzkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/81331791?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pandya6988","html_url":"https:\/\/github.com\/pandya6988","followers_url":"https:\/\/api.github.com\/users\/pandya6988\/followers","following_url":"https:\/\/api.github.com\/users\/pandya6988\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pandya6988\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pandya6988\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pandya6988\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pandya6988\/orgs","repos_url":"https:\/\/api.github.com\/users\/pandya6988\/repos","events_url":"https:\/\/api.github.com\/users\/pandya6988\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pandya6988\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-09T09:26:42Z","updated_at":"2021-12-14T14:38:29Z","closed_at":"2021-12-14T14:38:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nFollowing command gives an ConnectionError.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndisaster = load_dataset('disaster_response_messages')\r\n```\r\n\r\n## Error\r\n```\r\nConnectionError: Couldn't reach https:\/\/datasets.appen.com\/appen_datasets\/disaster_response_data\/disaster_response_messages_training.csv\r\n```\r\n## Expected results\r\nIt should load dataset without an error\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform: Google Colab\r\n- Python version: 3.7\r\n- PyArrow version: \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3240\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3239","id":1048360232,"node_id":"I_kwDODunzps4-fLUo","number":3239,"title":"Inconsistent performance of the \"arabic_billion_words\" 
dataset","user":{"login":"vitalyshalumov","id":33824221,"node_id":"MDQ6VXNlcjMzODI0MjIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33824221?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vitalyshalumov","html_url":"https:\/\/github.com\/vitalyshalumov","followers_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/followers","following_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/orgs","repos_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/repos","events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-09T09:11:00Z","updated_at":"2021-11-09T09:11:00Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen downloaded from macine 1 the dataset is downloaded and parsed correctly.\r\nWhen downloaded from machine two (which has a different cache directory),\r\nthe following script:\r\n\r\nimport datasets\r\nfrom datasets import load_dataset\r\nraw_dataset_elkhair_1 = load_dataset('arabic_billion_words', 'Alittihad', split=\"train\",download_mode='force_redownload')\r\n\r\ngives the following error:\r\n\r\n**Downloading and preparing dataset arabic_billion_words\/Alittihad (download: 332.13 MiB, generated: 1.49 GiB, post-processed: Unknown size, total: 1.82 GiB) to \/root\/.cache\/huggingface\/datasets\/arabic_billion_words\/Alittihad\/1.1.0\/687a1f963284c8a766558661375ea8f7ab3fa3633f8cd9c9f42a53ebe83bfe17...\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 348M\/348M [00:24<00:00, 14.0MB\/s]\r\nTraceback (most recent call last):\r\n File \"...\/why_mismatch.py\", line 3, in \r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n 
builder_instance.download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 709, in _download_and_prepare\r\n verify_splits(self.info.splits, split_dict)\r\n File \"\/opt\/conda\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 74, in verify_splits\r\n raise NonMatchingSplitsSizesError(str(bad_splits))\r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=1601790302, num_examples=349342, dataset_name='arabic_billion_words'), 'recorded': SplitInfo(name='train', num_bytes=0, num_examples=0, dataset_name='arabic_billion_words')}]**\r\n\r\n\r\n\r\n\r\nNote that the package versions of datasets (1.15.1) and rarfile (4.0) are identical.\r\n\r\n\r\n## Steps to reproduce the bug\r\nimport datasets\r\nfrom datasets import load_dataset\r\nraw_dataset_elkhair_1 = load_dataset('arabic_billion_words', 'Alittihad', split=\"train\",download_mode='force_redownload')\r\n\r\n\r\n# Sample code to reproduce the bug\r\n\r\n## Expected results\r\nDownloading and preparing dataset arabic_billion_words\/Alittihad (download: 332.13 MiB, generated: 1.49 GiB, post-processed: Unknown size, total: 1.82 GiB) to ...\/.cache\/huggingface\/datasets\/arabic_billion_words\/Alittihad\/1.1.0\/687a1f963284c8a766558661375ea8f7ab3fa3633f8cd9c9f42a53ebe83bfe17...\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 348M\/348M [00:22<00:00, 15.8MB\/s]\r\nDataset arabic_billion_words downloaded and prepared to ...\/.cache\/huggingface\/datasets\/arabic_billion_words\/Alittihad\/1.1.0\/687a1f963284c8a766558661375ea8f7ab3fa3633f8cd9c9f42a53ebe83bfe17. 
Subsequent calls will reuse this data.\r\n\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\nMachine 1:\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-5.8.0-63-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 4.0.1\r\n\r\nMachine 2 (the bugged one)\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-4.4.0-210-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.8\r\n- PyArrow version: 6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3239\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3238","id":1048226086,"node_id":"I_kwDODunzps4-eqkm","number":3238,"title":"Reuters21578 Couldn't reach ","user":{"login":"TingNLP","id":54096137,"node_id":"MDQ6VXNlcjU0MDk2MTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/54096137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TingNLP","html_url":"https:\/\/github.com\/TingNLP","followers_url":"https:\/\/api.github.com\/users\/TingNLP\/followers","following_url":"https:\/\/api.github.com\/users\/TingNLP\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TingNLP\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TingNLP\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TingNLP\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TingNLP\/orgs","repos_url":"https:\/\/api.github.com\/users\/TingNLP\/repos","events_url":"https:\/\/api.github.com\/users\/TingNLP\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TingNLP\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-11-09T06:08:56Z","updated_at":"2021-11-11T00:02:57Z","closed_at":"2021-11-11T00:02:57Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"``## Adding a Dataset\r\n- **Name:** *Reuters21578*\r\n- **Description:** *ConnectionError: Couldn't reach https:\/\/kdd.ics.uci.edu\/databases\/reuters21578\/reuters21578.tar.gz*\r\n- **Data:** *https:\/\/huggingface.co\/datasets\/reuters21578*\r\n\r\n`from datasets import load_dataset`\r\n`dataset = load_dataset(\"reuters21578\", 'ModLewis')`\r\n\r\nConnectionError: Couldn't reach https:\/\/kdd.ics.uci.edu\/databases\/reuters21578\/reuters21578.tar.gz\r\n\r\nAnd I try to request the link as follow:\r\n`import 
requests`\r\n`requests.head('https:\/\/kdd.ics.uci.edu\/databases\/reuters21578\/reuters21578.tar.gz')`\r\n\r\nSSLError: HTTPSConnectionPool(host='kdd.ics.uci.edu', port=443): Max retries exceeded with url: \/databases\/reuters21578\/reuters21578.tar.gz (Caused by SSLError(SSLError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:852)'),))\r\n\r\nThis problem likes #575\r\nWhat should I do ?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3238\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3237","id":1048165525,"node_id":"I_kwDODunzps4-ebyV","number":3237,"title":"wikitext description wrong","user":{"login":"hongyuanmei","id":19693633,"node_id":"MDQ6VXNlcjE5NjkzNjMz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19693633?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hongyuanmei","html_url":"https:\/\/github.com\/hongyuanmei","followers_url":"https:\/\/api.github.com\/users\/hongyuanmei\/followers","following_url":"https:\/\/api.github.com\/users\/hongyuanmei\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hongyuanmei\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hongyuanmei\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hongyuanmei\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hongyuanmei\/orgs","repos_url":"https:\/\/api.github.com\/users\/hongyuanmei\/repos","events_url":"https:\/\/api.github.com\/users\/hongyuanmei\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hongyuanmei\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-11-09T04:06:52Z","updated_at":"2021-11-09T13:49:28Z","closed_at":"2021-11-09T13:49:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nDescriptions of the wikitext datasests are wrong. \r\n\r\n## Steps to reproduce the bug\r\nPlease see: https:\/\/github.com\/huggingface\/datasets\/blob\/f6dcafce996f39b6a4bbe3a9833287346f4a4b68\/datasets\/wikitext\/wikitext.py#L50\r\n\r\n## Expected results\r\nThe descriptions for raw-v1 and v1 should be switched. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3237\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3236","id":1048026358,"node_id":"I_kwDODunzps4-d5z2","number":3236,"title":"Loading of datasets changed in #3110 returns no examples ","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2021-11-08T23:29:46Z","updated_at":"2021-11-09T16:46:05Z","closed_at":"2021-11-09T16:45:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nLoading of datasets changed in https:\/\/github.com\/huggingface\/datasets\/pull\/3110 returns no examples:\r\n```python\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['id', 'title', 'abstract', 'full_text', 'qas'],\r\n num_rows: 0\r\n })\r\n validation: Dataset({\r\n features: ['id', 'title', 'abstract', 'full_text', 'qas'],\r\n num_rows: 0\r\n })\r\n})\r\n```\r\n\r\n## Steps to reproduce the bug\r\nLoad any of the datasets that were changed in https:\/\/github.com\/huggingface\/datasets\/pull\/3110:\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset(\"qasper\")\r\n\r\n# The problem only started with the commit of #3110\r\nload_dataset(\"qasper\", revision=\"b6469baa22c174b3906c631802a7016fedea6780\")\r\n```\r\n\r\n## Expected results\r\n```python\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['id', 'title', 'abstract', 'full_text', 'qas'],\r\n num_rows: 888\r\n })\r\n validation: Dataset({\r\n features: ['id', 'title', 'abstract', 'full_text', 'qas'],\r\n num_rows: 281\r\n })\r\n})\r\n```\r\nWhich can be received when specifying revision of the commit before https:\/\/github.com\/huggingface\/datasets\/pull\/3110:\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset(\"qasper\", revision=\"acfe2abda1ca79f0ce5c1896aa83b4b78af76b7d\")\r\n```\r\n\r\n## 
Environment info\r\n\r\n- `datasets` version: 1.15.2.dev0 (master)\r\n- Python version: 3.8.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3236\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3235","id":1047808263,"node_id":"PR_kwDODunzps4uPr9Z","number":3235,"title":"Addd options to use updated bleurt checkpoints","user":{"login":"jaehlee","id":11873078,"node_id":"MDQ6VXNlcjExODczMDc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11873078?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaehlee","html_url":"https:\/\/github.com\/jaehlee","followers_url":"https:\/\/api.github.com\/users\/jaehlee\/followers","following_url":"https:\/\/api.github.com\/users\/jaehlee\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaehlee\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaehlee\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaehlee\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaehlee\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaehlee\/repos","events_url":"https:\/\/api.github.com\/users\/jaehlee\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaehlee\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-08T18:53:54Z","updated_at":"2021-11-12T14:05:28Z","closed_at":"2021-11-12T14:05:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3235","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3235","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3235.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3235.patch","merged_at":"2021-11-12T14:05:28Z"},"body":"Adds options to use newer recommended checkpoint (as of 2021\/10\/8) bleurt-20 and its distilled versions. \r\n\r\nUpdated checkpoints are described in https:\/\/github.com\/google-research\/bleurt\/blob\/master\/checkpoints.md#the-recommended-checkpoint-bleurt-20\r\n\r\nThis change won't affect the default behavior of metrics\/bleurt. It only adds option to load newer checkpoints as\r\n\r\n`datasets.load_metric('bleurt', 'bleurt-20')`\r\n\r\n`bluert-20` generates scores roughly between 0 and 1, which wasn't the case for the previous checkpoints. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3235\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3234","id":1047634236,"node_id":"PR_kwDODunzps4uPHRk","number":3234,"title":"Avoid PyArrow type optimization if it fails","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-11-08T16:10:27Z","updated_at":"2021-11-10T12:04:29Z","closed_at":"2021-11-10T12:04:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3234","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3234","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3234.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3234.patch","merged_at":"2021-11-10T12:04:28Z"},"body":"Adds a new variable, `DISABLE_PYARROW_TYPES_OPTIMIZATION`, to `config.py` for easier control of the Arrow type optimization.\r\n\r\nFix #2206 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3234\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3233","id":1047474931,"node_id":"PR_kwDODunzps4uOl9-","number":3233,"title":"Improve repository structure docs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-08T13:51:35Z","updated_at":"2021-11-09T10:02:18Z","closed_at":"2021-11-09T10:02:17Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3233","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3233","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3233.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3233.patch","merged_at":"2021-11-09T10:02:17Z"},"body":"Continuation of the documentation started in https:\/\/github.com\/huggingface\/datasets\/pull\/3221, taking into account @stevhliu 's comments","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3233\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3232","id":1047361573,"node_id":"I_kwDODunzps4-bXgl","number":3232,"title":"The Xsum datasets seems not able to 
download.","user":{"login":"FYYFU","id":37999885,"node_id":"MDQ6VXNlcjM3OTk5ODg1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37999885?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/FYYFU","html_url":"https:\/\/github.com\/FYYFU","followers_url":"https:\/\/api.github.com\/users\/FYYFU\/followers","following_url":"https:\/\/api.github.com\/users\/FYYFU\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/FYYFU\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/FYYFU\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/FYYFU\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/FYYFU\/orgs","repos_url":"https:\/\/api.github.com\/users\/FYYFU\/repos","events_url":"https:\/\/api.github.com\/users\/FYYFU\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/FYYFU\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-11-08T11:58:54Z","updated_at":"2021-11-09T15:07:16Z","closed_at":"2021-11-09T15:07:16Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe download Link of the Xsum dataset provided in the repository is [Link](http:\/\/bollin.inf.ed.ac.uk\/public\/direct\/XSUM-EMNLP18-Summary-Data-Original.tar.gz). It seems not able to download.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset('xsum')\r\n```\r\n\r\n\r\n## Actual results\r\n``` python\r\nraise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/bollin.inf.ed.ac.uk\/public\/direct\/XSUM-EMNLP18-Summary-Data-Original.tar.gz\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3232\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3231","id":1047170906,"node_id":"PR_kwDODunzps4uNmWT","number":3231,"title":"Group tests in multiprocessing workers by test 
file","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-08T08:46:03Z","updated_at":"2021-11-08T13:19:18Z","closed_at":"2021-11-08T08:59:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3231","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3231","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3231.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3231.patch","merged_at":"2021-11-08T08:59:43Z"},"body":"By grouping tests by test file, we make sure that all the tests in `test_load.py` are sent to the same worker.\r\n\r\nTherefore, the fixture `hf_token` will be called only once (and from the same worker).\r\n\r\nRelated to: #3200.\r\nFix #3219.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3231\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3230","id":1047135583,"node_id":"PR_kwDODunzps4uNfEd","number":3230,"title":"Add full tagset to conll2003 
README","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-08T08:06:04Z","updated_at":"2021-11-09T10:48:38Z","closed_at":"2021-11-09T10:40:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3230","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3230","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3230.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3230.patch","merged_at":"2021-11-09T10:40:58Z"},"body":"Even though it is possible to manually get the tagset list with\r\n\r\n```python\r\ndset.features[field_name].feature.names\r\n```\r\n\r\nI think it is useful to have an overview of the used tagset on the dataset card. This is particularly useful in light of the **dataset viewer**: the tags are encoded, so it is not immediately obvious what they are for a given sample. Adding a label-int mapping should make it easier for visitors to get a grasp of what they mean.\r\n\r\nFrom user-experience perspective, I would urge the full tagsets to always be available in the README's but I understand that that would take a lot of work, probably. 
Perhaps it can be automated?\r\n\r\ncloses #3189 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3230\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3229","id":1046706425,"node_id":"PR_kwDODunzps4uMKsx","number":3229,"title":"Fix URL in CITATION file","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-07T10:04:35Z","updated_at":"2021-11-07T10:04:46Z","closed_at":"2021-11-07T10:04:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3229","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3229","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3229.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3229.patch","merged_at":"2021-11-07T10:04:45Z"},"body":"Currently the BibTeX citation parsed from the CITATION file has wrong URL (it shows the repo URL instead of the proceedings paper URL):\r\n```\r\n@inproceedings{Lhoest_Datasets_A_Community_2021,\r\nauthor = {Lhoest, Quentin and Villanova del Moral, Albert and von Platen, Patrick and Wolf, Thomas and \u0160a\u0161ko, Mario and Jernite, Yacine and Thakur, Abhishek and Tunstall, Lewis and Patil, Suraj and Drame, Mariama and Chaumond, Julien and Plu, Julien and Davison, Joe and Brandeis, Simon and Sanh, Victor and Le Scao, Teven and Canwen Xu, Kevin and Patry, Nicolas and Liu, Steven and McMillan-Major, Angelina and Schmid, Philipp and Gugger, Sylvain and Raw, Nathan and Lesage, Sylvain and Lozhkov, Anton and Carrigan, Matthew and Matussi\u00e8re, Th\u00e9o and von Werra, Leandro and Debut, Lysandre and Bekman, Stas and Delangue, Cl\u00e9ment},\r\nbooktitle = 
{Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing: System Demonstrations},\r\nmonth = {11},\r\npages = {175--184},\r\npublisher = {Association for Computational Linguistics},\r\ntitle = {{Datasets: A Community Library for Natural Language Processing}},\r\nurl = {https:\/\/github.com\/huggingface\/datasets},\r\nyear = {2021}\r\n}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3229\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3228","id":1046702143,"node_id":"PR_kwDODunzps4uMJ58","number":3228,"title":"Add CITATION file","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-07T09:40:19Z","updated_at":"2021-11-07T09:51:47Z","closed_at":"2021-11-07T09:51:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3228","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3228","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3228.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3228.patch","merged_at":"2021-11-07T09:51:46Z"},"body":"Add CITATION file.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3228\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3227","id":1046667845,"node_id":"I_kwDODunzps4-YuJF","number":3227,"title":"Error in `Json(datasets.ArrowBasedBuilder)` class","user":{"login":"JunShern","id":7796965,"node_id":"MDQ6VXNlcjc3OTY5NjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7796965?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JunShern","html_url":"https:\/\/github.com\/JunShern","followers_url":"https:\/\/api.github.com\/users\/JunShern\/followers","following_url":"https:\/\/api.github.com\/users\/JunShern\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JunShern\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JunShern\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JunShern\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JunShern\/orgs","repos_url":"https:\/\/api.github.com\/users\/JunShern\/repos","events_url":"https:\/\/api.github.com\/users\/JunShern\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JunShern\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-11-07T05:50:32Z","updated_at":"2021-11-09T19:09:15Z","closed_at":"2021-11-09T19:09:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen a json file contains a `text` field that is larger than the block_size, the JSON dataset builder fails.\r\n\r\n## Steps to reproduce the bug\r\nCreate a folder that contains the following:\r\n```\r\n.\r\n\u251c\u2500\u2500 testdata\r\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 mydata.json\r\n\u2514\u2500\u2500 test.py\r\n```\r\n\r\nPlease download [this file](https:\/\/github.com\/huggingface\/datasets\/files\/7491797\/mydata.txt) as `mydata.json`. 
(The error does not occur in JSON files with shorter text, but it is reproducible when the text is long as in the file I provide)\r\n:exclamation: :exclamation: GitHub doesn't allow me to upload JSON so this file is a TXT, and you should rename it to `.json`!\r\n\r\n`test.py` simply contains:\r\n```python\r\nfrom datasets import load_dataset\r\nmy_dataset = load_dataset(\"testdata\")\r\n```\r\n\r\nTo reproduce the error, simply run\r\n```\r\npython test.py\r\n```\r\n\r\n## Expected results\r\nThe data should load correctly without error.\r\n\r\n## Actual results\r\nThe dataset builder fails with:\r\n```\r\nUsing custom data configuration testdata-d490389b8ab4fd82\r\nDownloading and preparing dataset json\/testdata to \/home\/junshern.chan\/.cache\/huggingface\/datasets\/json\/testdata-d490389b8ab4fd82\/0.0.0\/3333a8af0db9764dfcff43a42ff26228f0f2e267f0d8a0a294452d188beadb34...\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 2264.74it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 447.01it\/s]\r\nFailed to read file '\/home\/junshern.chan\/hf-json-bug\/testdata\/mydata.json' with error : JSON parse error: Missing a name for object member. 
in row 0\r\nTraceback (most recent call last):\r\n File \"test.py\", line 28, in \r\n my_dataset = load_dataset(\"testdata\")\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 697, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 1156, in _prepare_split\r\n for key, table in utils.tqdm(\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/tqdm\/std.py\", line 1168, in __iter__\r\n for obj in iterable:\r\n File \"\/home\/junshern.chan\/.casio\/miniconda\/envs\/hf-json-bug\/lib\/python3.8\/site-packages\/datasets\/packaged_modules\/json\/json.py\", line 146, in _generate_tables\r\n raise ValueError(\r\nValueError: Not able to read records in the JSON file at \/home\/junshern.chan\/hf-json-bug\/testdata\/mydata.json. You should probably indicate the field of the JSON file containing your records. This JSON file contain the following fields: ['text']. Select the correct one and provide it as `field='XXX'` to the dataset loading method. \r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.15.1\r\n- Platform: Linux-5.8.0-63-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.12\r\n- PyArrow version: 6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3227\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3226","id":1046584518,"node_id":"PR_kwDODunzps4uL0ma","number":3226,"title":"Fix paper BibTeX citation with proceedings 
reference","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-06T19:52:59Z","updated_at":"2021-11-07T07:05:28Z","closed_at":"2021-11-07T07:05:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3226","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3226","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3226.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3226.patch","merged_at":"2021-11-07T07:05:27Z"},"body":"Fix paper BibTeX citation with proceedings reference.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3226\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3225","id":1046530493,"node_id":"PR_kwDODunzps4uLrB3","number":3225,"title":"Update tatoeba to 
v2021-07-22","user":{"login":"KoichiYasuoka","id":15098598,"node_id":"MDQ6VXNlcjE1MDk4NTk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15098598?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KoichiYasuoka","html_url":"https:\/\/github.com\/KoichiYasuoka","followers_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/followers","following_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/orgs","repos_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/repos","events_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KoichiYasuoka\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-11-06T15:14:31Z","updated_at":"2021-11-12T11:13:13Z","closed_at":"2021-11-12T11:13:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3225","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3225","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3225.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3225.patch","merged_at":"2021-11-12T11:13:13Z"},"body":"Tatoeba's latest version is v2021-07-22","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3225\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3224","id":1046495831,"node_id":"PR_kwDODunzps4uLk2q","number":3224,"title":"User-pickling with dynamic 
sub-classing","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":18,"created_at":"2021-11-06T12:08:24Z","updated_at":"2022-01-05T13:11:36Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3224","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3224","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3224.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3224.patch","merged_at":null},"body":"This is a continuation of the now closed PR in https:\/\/github.com\/huggingface\/datasets\/pull\/3206. The discussion there has shaped a new approach to do this.\r\n\r\nIn this PR, behavior of `pklregister` and `Pickler` is extended. Earlier, users were already able to register custom pickle functions. That is useful if they have objects that are not easily picklable with default methods. When one registers a custom function to a type, an object of that type will be pickled with the given function by `Pickler` which looks up the type in its `dispatch` table. The downside of this method, and of `pickle` in general, is that it is limited to direct type-matching and does not allow sub-classes. In many, default, cases that is not an issue. But when you are using external libraries where classes (e.g. parsers, models) are sub-classed this is not ideal. \r\n\r\n```python\r\nfrom datasets.fingerprint import Hasher\r\nfrom datasets.utils.py_utils import pklregister\r\n\r\nclass BaseParser:\r\n pass\r\n\r\nclass EnglishParser(BaseParser):\r\n pass\r\n\r\n@pklregister(BaseParser)\r\ndef custom_pkl_func(pickler, obj):\r\n print(f\"Called the custom pickle function for type {type(obj)}!\")\r\n # do something with the obj and ultimately save with the pickler\r\n\r\nbase = BaseParser()\r\nen = EnglishParser()\r\n\r\n# Hasher.hash uses the Pickler behind the scenes\r\n# `custom_pkl_func` called for base\r\nHasher.hash(base)\r\n# `custom_pkl_func` not called for en :-(\r\nHasher.hash(en)\r\n```\r\n\r\nIn the example above we'd want to sub-class `EnglishParser` to be handled in the same way as its super-class `BaseParser`. This PR solves that by allowing for a keyword-argument `allow_subclasses` in `pklregister` (default: `False`). 
\r\n\r\n```python\r\n@pklregister(BaseParser, allow_subclasses=True)\r\n```\r\n\r\nWhen this option is enabled, we not only save the function in `Pickler.dispatch` but also save it in a custom table `Pickler.subclass_dispatch` **which allows us to dynamically add sub-classes of that class to the real dispatch table**. Then, if we want to pickle an object `obj` with `Pickler.dump()` (which ultimately will call `Pickler.save()`) we _first_ check whether any of the object's super-classes exist in `Pickler.sublcass_dispatch` and get the related custom pickle function. If we find one, we add the type of `obj` alongside the function to `Pickler.dispatch`. All of this happens at the start of the call to `Pickler.save()`. _Only then_ dill.Pickler's `save` will be called, which in turn will call `pickle._Pickler.save` which handles everything. Here, the `Pickler.dispatch` table will be used to look up custom pickler functions - and it now also includes the function for `obj`, which was copied from its super-class, which we added at the very start of our custom `Pickler.save()`.\r\n\r\nFor edge cases and, especially, for testing, a contextmanager class `TempPickleRegistry` is included that resets the pickle registry on exit to its previous state.\r\n\r\n```python\r\nwith TempPickleRegistry():\r\n @pklregister(MyObjClass)\r\n def pickle_registry_test_false(pickler, obj):\r\n pickler.save(obj.fancy_method())\r\n\r\n some_obj = MyObjClass()\r\n dumps(some_obj)\r\n # `MyObjClass` is in Pickler.dispatch\r\n\r\n# ... `MyObjClass` is _not_ in Pickler.dispatch anymore\r\n```\r\n\r\ncloses https:\/\/github.com\/huggingface\/datasets\/issues\/3178\r\n\r\nTo Do\r\n====\r\n- [x] Write tests\r\n- [ ] Write documentation\/examples?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3224\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3223","id":1046445507,"node_id":"PR_kwDODunzps4uLb1E","number":3223,"title":"Update BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-06T06:41:52Z","updated_at":"2021-11-06T07:06:38Z","closed_at":"2021-11-06T07:06:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3223","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3223","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3223.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3223.patch","merged_at":"2021-11-06T07:06:38Z"},"body":"Update BibTeX entry.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3223\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3222","id":1046299725,"node_id":"PR_kwDODunzps4uK_uG","number":3222,"title":"Add docs for audio 
processing","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-11-05T23:07:59Z","updated_at":"2021-11-24T16:32:08Z","closed_at":"2021-11-24T15:35:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3222","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3222","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3222.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3222.patch","merged_at":"2021-11-24T15:35:52Z"},"body":"This PR adds documentation for the `Audio` feature. 
It describes:\r\n\r\n- The difference between loading `path` and `audio`, as well as use-cases\/best practices for each of them.\r\n- Resampling audio files with `cast_column`, and then calling `ds[0][\"audio\"]` to automatically decode and resample to the desired sampling rate.\r\n- Resampling with `map`.\r\n\r\nPreview [here](https:\/\/52969-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/audio_process.html), let me know if I'm missing anything!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3222\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3221","id":1045890512,"node_id":"PR_kwDODunzps4uJp4Z","number":3221,"title":"Resolve data_files by split name","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-11-05T14:07:35Z","updated_at":"2021-11-08T13:52:20Z","closed_at":"2021-11-05T17:49:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3221","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3221","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3221.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3221.patch","merged_at":"2021-11-05T17:49:57Z"},"body":"As discussed in https:\/\/github.com\/huggingface\/datasets\/issues\/3027 we should automatically infer what file is supposed to go to what split automatically, based on filenames.\r\n\r\nI added the support for different kinds of patterns, for both dataset repositories and local directories:\r\n\r\n```\r\n Input structure:\r\n\r\n my_dataset_repository\/\r\n \u251c\u2500\u2500 README.md\r\n \u2514\u2500\u2500 dataset.csv\r\n\r\n Output patterns:\r\n\r\n {\"train\": [\"*\"]}\r\n```\r\n```\r\n Input structure:\r\n\r\n my_dataset_repository\/\r\n 
\u251c\u2500\u2500 README.md\r\n \u251c\u2500\u2500 train.csv\r\n \u2514\u2500\u2500 test.csv\r\n\r\n my_dataset_repository\/\r\n \u251c\u2500\u2500 README.md\r\n \u2514\u2500\u2500 data\/\r\n \u251c\u2500\u2500 train.csv\r\n \u2514\u2500\u2500 test.csv\r\n\r\n my_dataset_repository\/\r\n \u251c\u2500\u2500 README.md\r\n \u251c\u2500\u2500 train_0.csv\r\n \u251c\u2500\u2500 train_1.csv\r\n \u251c\u2500\u2500 train_2.csv\r\n \u251c\u2500\u2500 train_3.csv\r\n \u251c\u2500\u2500 test_0.csv\r\n \u2514\u2500\u2500 test_1.csv\r\n\r\n Output patterns:\r\n\r\n {\"train\": [\"*train*\"], \"test\": [\"*test*\"]}\r\n```\r\n```\r\n Input structure:\r\n\r\n my_dataset_repository\/\r\n \u251c\u2500\u2500 README.md\r\n \u2514\u2500\u2500 data\/\r\n \u251c\u2500\u2500 train\/\r\n \u2502 \u251c\u2500\u2500 shard_0.csv\r\n \u2502 \u251c\u2500\u2500 shard_1.csv\r\n \u2502 \u251c\u2500\u2500 shard_2.csv\r\n \u2502 \u2514\u2500\u2500 shard_3.csv\r\n \u2514\u2500\u2500 test\/\r\n \u251c\u2500\u2500 shard_0.csv\r\n \u2514\u2500\u2500 shard_1.csv\r\n\r\n Output patterns:\r\n\r\n {\"train\": [\"*train*\/*\", \"*train*\/**\/*\"], \"test\": [\"*test*\/*\", \"*test*\/**\/*\"]}\r\n```\r\n\r\nand also this pattern that allows to have custom split names, and that is the structure used by #3098 for `push_to_hub` (cc @LysandreJik ):\r\n\r\n```\r\n Input structure:\r\n\r\n my_dataset_repository\/\r\n \u251c\u2500\u2500 README.md\r\n \u2514\u2500\u2500 data\/\r\n \u251c\u2500\u2500 train-00000-of-00003.csv\r\n \u251c\u2500\u2500 train-00001-of-00003.csv\r\n \u251c\u2500\u2500 train-00002-of-00003.csv\r\n \u251c\u2500\u2500 test-00000-of-00001.csv\r\n \u251c\u2500\u2500 random-00000-of-00003.csv\r\n \u251c\u2500\u2500 random-00001-of-00003.csv\r\n \u2514\u2500\u2500 random-00002-of-00003.csv\r\n\r\n Output patterns:\r\n\r\n {\r\n \"train\": [\"data\/train-[0-9][0-9][0-9][0-9][0-9]-of-[0-9][0-9][0-9][0-9][0-9].*\"],\r\n \"test\": [\"data\/test-[0-9][0-9][0-9][0-9][0-9]-of-[0-9][0-9][0-9][0-9][0-9].*\"],\r\n \"random\": [\"data\/random-[0-9][0-9][0-9][0-9][0-9]-of-[0-9][0-9][0-9][0-9][0-9].*\"],\r\n }\r\n```\r\n\r\nYou can check the documentation about structuring your repository [here](https:\/\/52640-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/repository_structure.html). 
cc @stevhliu \r\n\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/3027\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/3212\r\n\r\nIn the future we can also add support for dataset configurations.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3221\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3220","id":1045549029,"node_id":"I_kwDODunzps4-Uc_l","number":3220,"title":"Add documentation about dataset viewer feature","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-05T08:11:19Z","updated_at":"2021-11-05T08:11:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Add to the docs more details about the dataset viewer feature in the Hub.\r\n\r\nCC: @julien-c \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3220\/timeline","performed_via_github_app":null} 
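Editor's note on the audio-processing docs PR (#3222) and the `data_files` PR (#3221) recorded above: the resampling workflow that #3222 documents can be sketched in a few lines. This is a minimal, hedged illustration only; the dataset name and config are placeholders, and the exact column contents depend on the dataset used.

```python
from datasets import Audio, load_dataset

# Placeholder dataset/config purely for illustration; any dataset with an
# Audio column follows the same pattern described in PR #3222.
ds = load_dataset("common_voice", "tr", split="train")

# Cast the audio column to a new sampling rate; nothing is rewritten on disk.
ds = ds.cast_column("audio", Audio(sampling_rate=16_000))

# Accessing the column decodes the file and resamples it on the fly.
sample = ds[0]["audio"]          # dict with "path", "array", "sampling_rate"
print(sample["sampling_rate"])   # 16000
```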
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3219","id":1045095000,"node_id":"I_kwDODunzps4-SuJY","number":3219,"title":"Eventual Invalid Token Error at setup of private datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-11-04T18:50:45Z","updated_at":"2021-11-08T13:23:06Z","closed_at":"2021-11-08T08:59:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nFrom time to time, there appear Invalid Token errors with private datasets:\r\n\r\n- https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8520\/workflows\/d44629f2-4749-40f8-a657-50931d0b3434\/jobs\/52534\r\n ```\r\n ____________ ERROR at setup of test_load_streaming_private_dataset _____________\r\n ValueError: Invalid token passed!\r\n\r\n ____ ERROR at setup of test_load_streaming_private_dataset_with_zipped_data ____\r\n ValueError: Invalid token passed!\r\n \r\n =========================== short test summary info ============================\r\n ERROR tests\/test_load.py::test_load_streaming_private_dataset - ValueError: I...\r\n ERROR tests\/test_load.py::test_load_streaming_private_dataset_with_zipped_data\r\n ```\r\n\r\n- https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8557\/workflows\/a8383181-ba6d-4487-9d0a-f750b6dcb936\/jobs\/52763\r\n ```\r\n ____ ERROR at setup of test_load_streaming_private_dataset_with_zipped_data ____\r\n [gw1] linux -- Python 3.6.15 \/home\/circleci\/.pyenv\/versions\/3.6.15\/bin\/python3.6\r\n\r\n hf_api = \r\n hf_token = 
'vgNbyuaLNEBuGbgCEtSBCOcPjZnngJufHkTaZvHwkXKGkHpjBPwmLQuJVXRxBuaRzNlGjlMpYRPbthfHPFWXaaEDTLiqTTecYENxukRYVAAdpeApIUPxcgsowadkTkPj'\r\n zip_csv_path = PosixPath('\/tmp\/pytest-of-circleci\/pytest-0\/popen-gw1\/data16\/dataset.csv.zip')\r\n\r\n @pytest.fixture(scope=\"session\")\r\n def hf_private_dataset_repo_zipped_txt_data_(hf_api: HfApi, hf_token, zip_csv_path):\r\n repo_name = \"repo_zipped_txt_data-{}\".format(int(time.time() * 10e3))\r\n hf_api.create_repo(token=hf_token, name=repo_name, repo_type=\"dataset\", private=True)\r\n repo_id = f\"{USER}\/{repo_name}\"\r\n hf_api.upload_file(\r\n token=hf_token,\r\n path_or_fileobj=str(zip_csv_path),\r\n path_in_repo=\"data.zip\",\r\n repo_id=repo_id,\r\n > repo_type=\"dataset\",\r\n )\r\n\r\n tests\/hub_fixtures.py:68:\r\n\r\n ...\r\n\r\n ValueError: Invalid token passed!\r\n =========================== short test summary info ============================\r\n ERROR tests\/test_load.py::test_load_streaming_private_dataset_with_zipped_data\r\n ```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3219\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3218","id":1045032313,"node_id":"PR_kwDODunzps4uG2UA","number":3218,"title":"Fix code quality in riddle_sense 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-04T17:43:20Z","updated_at":"2021-11-04T17:50:03Z","closed_at":"2021-11-04T17:50:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3218","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3218","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3218.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3218.patch","merged_at":"2021-11-04T17:50:02Z"},"body":"Fix trailing whitespace.\r\n\r\nFix #3217.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3218\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3217","id":1045029710,"node_id":"I_kwDODunzps4-SeNO","number":3217,"title":"Fix code quality bug in riddle_sense 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-11-04T17:40:32Z","updated_at":"2021-11-04T17:50:02Z",
"closed_at":"2021-11-04T17:50:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n```\r\ndatasets\/riddle_sense\/riddle_sense.py:36:21: W291 trailing whitespace\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3217\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3216","id":1045027733,"node_id":"PR_kwDODunzps4uG1YS","number":3216,"title":"Pin version exclusion for tensorflow incompatible with keras","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-04T17:38:06Z","updated_at":"2021-11-05T10:57:38Z","closed_at":"2021-11-05T10:57:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3216","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3216","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3216.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3216.patch","merged_at":"2021-11-05T10:57:37Z"},"body":"Once `tensorflow` version 2.6.2 is released:\r\n- https:\/\/github.com\/tensorflow\/tensorflow\/commit\/c1867f3bfdd1042f694df7a9870be51ba80543cb\r\n- https:\/\/pypi.org\/project\/tensorflow\/2.6.2\/\r\n\r\nwith the patch:\r\n- tensorflow\/tensorflow#52927\r\n\r\nwe can remove the temporary fix we introduced in:\r\n- #3208\r\n\r\nFix 
#3209.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3216\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3215","id":1045011207,"node_id":"PR_kwDODunzps4uGx4o","number":3215,"title":"Small updates to to_tf_dataset documentation","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-04T17:22:01Z","updated_at":"2021-11-04T18:55:38Z","closed_at":"2021-11-04T18:55:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3215","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3215","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3215.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3215.patch","merged_at":"2021-11-04T18:55:37Z"},"body":"I added a little more description about `to_tf_dataset` compared to just setting the format","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3215\/timeline","performed_via_github_app":null} 
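Editor's note on PR #3215 above ("Small updates to to_tf_dataset documentation"): a minimal sketch of how `to_tf_dataset` differs from simply calling `set_format("tf")`, in that it returns a batched, optionally shuffled `tf.data.Dataset`. The model and dataset names are placeholders, and the exact keyword arguments have varied across `datasets`/`transformers` releases, so treat this as an assumption-laden example rather than the documented API of any specific version.

```python
from datasets import load_dataset
from transformers import AutoTokenizer, DataCollatorWithPadding

# Placeholder model and dataset, purely for illustration.
tokenizer = AutoTokenizer.from_pretrained("bert-base-cased")
ds = load_dataset("glue", "mrpc", split="train")
ds = ds.map(
    lambda ex: tokenizer(ex["sentence1"], ex["sentence2"], truncation=True),
    batched=True,
)

# The collator pads each batch to the longest sequence in that batch.
collator = DataCollatorWithPadding(tokenizer, return_tensors="tf")

# Unlike ds.set_format("tf"), this yields ready-to-train (features, labels) batches.
tf_ds = ds.to_tf_dataset(
    columns=["input_ids", "attention_mask", "token_type_ids"],
    label_cols=["label"],
    batch_size=8,
    shuffle=True,
    collate_fn=collator,
)

for batch, labels in tf_ds.take(1):
    print({k: v.shape for k, v in batch.items()}, labels.shape)
```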
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3214","id":1044924050,"node_id":"I_kwDODunzps4-SEaS","number":3214,"title":"Add ACAV100M Dataset","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-04T15:59:58Z","updated_at":"2021-12-08T12:00:30Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *ACAV100M*\r\n- **Description:** *contains 100 million videos with high audio-visual correspondence, ideal for self-supervised video representation learning.*\r\n- **Paper:** *https:\/\/arxiv.org\/abs\/2101.10803*\r\n- **Data:** *https:\/\/github.com\/sangho-vision\/acav100m*\r\n- **Motivation:** *The largest dataset (to date) for audio-visual learning.*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3214\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3213","id":1044745313,"node_id":"PR_kwDODunzps4uF6W9","number":3213,"title":"Fix tuple_ie download url","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-04T13:09:07Z","updated_at":"2021-11-05T14:16:06Z","closed_at":"2021-11-05T14:16:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3213","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3213","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3213.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3213.patch","merged_at":"2021-11-05T14:16:05Z"},"body":"Fix #3204 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3213\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3212","id":1044640967,"node_id":"I_kwDODunzps4-Q_TH","number":3212,"title":"Sort files before 
loading","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-11-04T11:08:31Z","updated_at":"2021-11-05T17:49:58Z","closed_at":"2021-11-05T17:49:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When loading a dataset that consists of several files (e.g. `my_data\/data_001.json`, `my_data\/data_002.json` etc.) 
they are not loaded in order when using `load_dataset(\"my_data\")`.\r\n\r\nThis could lead to counter-intuitive results if, for example, the data files are sorted by date or similar since they would appear in different order in the `Dataset`.\r\n\r\nThe straightforward solution is to sort the list of files alphabetically before loading them.\r\n\r\ncc @lhoestq \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3212\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3211","id":1044617913,"node_id":"PR_kwDODunzps4uFkBx","number":3211,"title":"Fix disable_nullable default value to False","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-04T10:52:06Z","updated_at":"2021-11-04T11:08:21Z","closed_at":"2021-11-04T11:08:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3211","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3211","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3211.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3211.patch","merged_at":"2021-11-04T11:08:20Z"},"body":"Currently the `disable_nullable` parameter is not consistent across all dataset transforms. 
For example it is `False` in `map` but `True` in `flatten_indices`.\r\n\r\nThis creates unexpected behaviors like this\r\n```python\r\nfrom datasets import Dataset, concatenate_datasets\r\n\r\nd1 = Dataset.from_dict({\"a\": [0, 1, 2, 3]})\r\nd2 = d1.filter(lambda x: x[\"a\"] < 2).flatten_indices()\r\nd1.data.schema == d2.data.schema # False\r\n```\r\nThis can cause issues when concatenating datasets for example.\r\n\r\nFor consistency I set `disable_nullable` to `False` in `flatten_indices` and I fixed some docstrings\r\n\r\ncc @SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3211\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3210","id":1044611471,"node_id":"I_kwDODunzps4-Q4GP","number":3210,"title":"ConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.15.1\/datasets\/wmt16\/wmt16.py","user":{"login":"xiuzhilu","id":28184983,"node_id":"MDQ6VXNlcjI4MTg0OTgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28184983?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/xiuzhilu","html_url":"https:\/\/github.com\/xiuzhilu","followers_url":"https:\/\/api.github.com\/users\/xiuzhilu\/followers","following_url":"https:\/\/api.github.com\/users\/xiuzhilu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/xiuzhilu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/xiuzhilu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/xiuzhilu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/xiuzhilu\/orgs","repos_url":"https:\/\/api.github.com\/users\/xiuzhilu\/repos","events_url":"https:\/\/api.github.com\/users\/xiuzhilu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/xiuzhilu\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-04T10:47:26Z","updated_at":"2022-01-19T13:58:46Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"when I use python examples\/pytorch\/translation\/run_translation.py --model_name_or_path examples\/pytorch\/translation\/opus-mt-en-ro --do_train --do_eval --source_lang en --target_lang ro --dataset_name wmt16 --dataset_config_name ro-en --output_dir \/tmp\/tst-translation --per_device_train_batch_size=4 --per_device_eval_batch_size=4 --overwrite_output_dir --predict_with_generate to finetune translation model on huggingface, I get the 
issue\"ConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.15.1\/datasets\/wmt16\/wmt16.py\".But I can open the https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.15.1\/datasets\/wmt16\/wmt16.py by using website. What should I do to solve the issue?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3210\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3209","id":1044505771,"node_id":"I_kwDODunzps4-QeSr","number":3209,"title":"Unpin keras once TF fixes its release","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-04T09:15:32Z","updated_at":"2021-11-05T10:57:37Z","closed_at":"2021-11-05T10:57:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Related to:\r\n- #3208","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3209\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3208","id":1044504093,"node_id":"PR_kwDODunzps4uFTIs","number":3208,"title":"Pin keras 
version until TF fixes its release","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-04T09:13:32Z","updated_at":"2021-11-04T09:30:55Z","closed_at":"2021-11-04T09:30:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3208","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3208","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3208.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3208.patch","merged_at":"2021-11-04T09:30:54Z"},"body":"Fix #3207.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3208\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3207","id":1044496389,"node_id":"I_kwDODunzps4-QcAF","number":3207,"title":"CI error: Another metric with the same name already exists in Keras 
2.7.0","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-11-04T09:04:11Z","updated_at":"2021-11-04T09:30:54Z","c
losed_at":"2021-11-04T09:30:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nRelease of TensorFlow 2.7.0 contains an incompatibility with Keras. See:\r\n- keras-team\/keras#15579\r\n\r\nThis breaks our CI test suite: https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8493\/workflows\/055c7ae2-43bc-49b4-9f11-8fc71f35a25c\/jobs\/52363\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3207\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3206","id":1044216270,"node_id":"PR_kwDODunzps4uEZJe","number":3206,"title":"[WIP] Allow user-defined hash functions via a registry","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":13,"created_at":"2021-11-03T23:25:42Z","updated_at":"2021-11-05T12:38:11Z","closed_at":"2021-11-05T12:38:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3206","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3206","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3206.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3206.patch","merged_at":null},"body":"Inspired by the discussion on hashing in https:\/\/github.com\/huggingface\/datasets\/issues\/3178#issuecomment-959016329, @lhoestq suggested that it would be neat to allow users more control over the hashing process. 
Specifically, it would be great if users can specify specific hashing functions depending on the **class** of the object.\r\n\r\nAs an example, we found in the linked topic that loaded spaCy models (`Language` objects) have different hashes when `dump`'d, but their byte representation with `Language.to_bytes()` _is_ deterministic. It would therefore be great if we could specify that for `Language` objects, the hasher should hash the objects `to_bytes()` return value instead of the object itself.\r\n\r\nThis PR adds a new, but tiny, dependency to manage the registry, namely [`catalogue`](https:\/\/github.com\/explosion\/catalogue). \r\n\r\nTwo files have been changed (apart from the added dependency in `setup.py`) and one file has been added.\r\n\r\n**utils.registry** (added)\r\n\r\nThis file defines our custom Registry and builds a registry called \"hashers\". A Registry is basically dictionary from names (str) to functions. A function can be added to the registry by a decorator, e.g. \r\n\r\n```python\r\n@hashers.register(spacy.Language)\r\ndef hash_spacy_language(nlp):\r\n return Hasher.hash(nlp.to_bytes())\r\n```\r\n\r\nYou'll notice that `spacy.Language` is not a string, even though the registry holds a str->func mapping. To accomplish this with classes in a dynamic way, catalogue.Registry needed to be subclassed and modified as `DatasetsRegistry`. All methods that use a name as an input are now modified so that classes are deterministically converted in strings in such a way that we can later retrieve the actual class from the string (below).\r\n\r\n**utils.py_utils** (modified)\r\n\r\nAdded two functions to deal with classes and their qualified names, that is, their full descriptive name including the module. On the one hand it allows us to retrieve a string from a given class, e.g. given `Module` class, return `torch.nn.Module` str. Conversly, a function is added to convert such a full qualified name into a class. For instance, given the string `torch.nn.Module`, return the `Module` class. These straightforward methods allow us to interchangeably use classes and strings without any needed user interaction - they can just register a class, and behind the scenes `DatasetsRegistry` converts these to deterministic strings.\r\n\r\n**fingerprint** (modified)\r\n\r\nUpdated Hasher.hash so that if the object to hash is an instance of a class in the registry, the registered function is used to hash the object instead of the default behavior. To do so we iterate over the registry `hashers` and convert its keys (strings) into classes, and then we can use `isinstance`.\r\n\r\n```python\r\n# Check if the current object is an instance that is\r\n# applicable to the user-defined hashers. If so, hash\r\n# with the user-defined function\r\nfor full_module_name, func in hashers.get_all().items():\r\n registered_cls = get_cls_from_qualname(full_module_name)\r\n if isinstance(value, registered_cls):\r\n return func(value)\r\n```\r\n\r\n**Putting it all together**\r\n\r\nTo test this, you can try the following example with spaCy. First install spaCy from source and checkout a specific commit.\r\n\r\n```shell\r\ngit clone https:\/\/github.com\/explosion\/spaCy.git\r\ncd spaCy\/\r\ngit checkout cab9209c3dfcd1b75dfe5657f10e52c4d847a3cf\r\ncd ..\r\n\r\ngit clone https:\/\/github.com\/BramVanroy\/datasets.git\r\ncd datasets\r\ngit checkout registry\r\npip install -e .\r\npip install ..\/spaCy\r\nspacy download en_core_web_sm\r\n```\r\n\r\nNow you can run the following script. 
By default it will use the custom hasher function for the Language object. You can enable the default behavior by commenting out `@hashers.register...`.\r\n\r\n```python\r\nimport spacy\r\n\r\nfrom datasets.fingerprint import Hasher\r\nfrom datasets.utils.registry import hashers\r\n\r\n# Register a function so that when the Hasher encounters a spacy.Language object\r\n# it uses this custom function to hash instead of the default\r\n@hashers.register(spacy.Language)\r\ndef hash_spacy_language(nlp):\r\n return Hasher.hash(nlp.to_bytes())\r\n\r\n\r\ndef main():\r\n print(hashers.get_all())\r\n nlp = spacy.load(\"en_core_web_sm\")\r\n dump1 = Hasher.hash(nlp)\r\n nlp = spacy.load(\"en_core_web_sm\")\r\n dump2 = Hasher.hash(nlp)\r\n print(dump1)\r\n # succeeds when using the registered custom function\r\n # fails if using the default\r\n assert dump1 == dump2\r\n\r\n\r\nif __name__ == '__main__':\r\n main()\r\n```\r\n\r\nTo do\r\n====\r\n- The above is just a proof-of-concept. I am open to changes\/suggestions\r\n- Tests still need to be written\r\n- We should consider whether we can make `DatasetsRegistry` very restrictive and ONLY allowing classes. That would make testing easier - otherwise we also need to test for other sorts of objects.\r\n- Maybe the `hashers` definition is better suited in `fingerprint`?\r\n- Documentation\/examples need to be updated\r\n- Not sure why the logger is not working in `hash()`\r\n- `get_cls_from_qualname` might need a fail-safe: is it possible for a full_qualname to not have a module, and if so how do we deal with that?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3206\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3205","id":1044099561,"node_id":"PR_kwDODunzps4uEAlw","number":3205,"title":"Add Multidoc2dial 
Dataset","user":{"login":"sivasankalpp","id":7344617,"node_id":"MDQ6VXNlcjczNDQ2MTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7344617?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sivasankalpp","html_url":"https:\/\/github.com\/sivasankalpp","followers_url":"https:\/\/api.github.com\/users\/sivasankalpp\/followers","following_url":"https:\/\/api.github.com\/users\/sivasankalpp\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sivasankalpp\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sivasankalpp\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sivasankalpp\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sivasankalpp\/orgs","repos_url":"https:\/\/api.github.com\/users\/sivasankalpp\/repos","events_url":"https:\/\/api.github.com\/users\/sivasankalpp\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sivasankalpp\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-11-03T20:48:31Z","updated_at":"2021-11-24T17:32:49Z","closed_at":"2021-11-24T16:55:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3205","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3205","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3205.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3205.patch","merged_at":"2021-11-24T16:55:08Z"},"body":"This PR adds the MultiDoc2Dial dataset introduced in this [paper](https:\/\/arxiv.org\/pdf\/2109.12595v1.pdf )","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3205\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3204","id":1043707307,"node_id":"I_kwDODunzps4-NbWr","number":3204,"title":"FileNotFoundError for TupleIE 
dataste","user":{"login":"arda-vianai","id":75334917,"node_id":"MDQ6VXNlcjc1MzM0OTE3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75334917?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arda-vianai","html_url":"https:\/\/github.com\/arda-vianai","followers_url":"https:\/\/api.github.com\/users\/arda-vianai\/followers","following_url":"https:\/\/api.github.com\/users\/arda-vianai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arda-vianai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arda-vianai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arda-vianai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arda-vianai\/orgs","repos_url":"https:\/\/api.github.com\/users\/arda-vianai\/repos","events_url":"https:\/\/api.github.com\/users\/arda-vianai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arda-vianai\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-11-03T14:56:55Z","updated_at":"2021-11-05T15:51:15Z","closed_at":"2021-11-05T14:16:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n`dataset = 
datasets.load_dataset('tuple_ie', 'all')`\r\n\r\nreturns a FileNotFound error. Is the data not available? \r\n\r\nMany thanks.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3204\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3203","id":1043552766,"node_id":"PR_kwDODunzps4uCNoT","number":3203,"title":"Updated: DaNE - updated URL for download","user":{"login":"MalteHB","id":47593213,"node_id":"MDQ6VXNlcjQ3NTkzMjEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47593213?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MalteHB","html_url":"https:\/\/github.com\/MalteHB","followers_url":"https:\/\/api.github.com\/users\/MalteHB\/followers","following_url":"https:\/\/api.github.com\/users\/MalteHB\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MalteHB\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MalteHB\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MalteHB\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MalteHB\/orgs","repos_url":"https:\/\/api.github.com\/users\/MalteHB\/repos","events_url":"https:\/\/api.github.com\/users\/MalteHB\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MalteHB\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-11-03T12:55:13Z","updated_at":"2021-11-04T13:14:36Z","closed_at":"2021-11-04T11:46:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3203","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3203","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3203.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3203.patch","merged_at":"2021-11-04T11:46:43Z"},"body":"It seems that DaNLP has updated their download URLs and it therefore also needs to be updated in here...","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3203\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3202","id":1043213660,"node_id":"I_kwDODunzps4-Li1c","number":3202,"title":"Add mIoU metric","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-03T08:42:32Z","updated_at":"2021-11-03T08:54:44Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nRecently, some semantic segmentation models were added to HuggingFace Transformers, including [SegFormer](https:\/\/huggingface.co\/transformers\/model_doc\/segformer.html) and [BEiT](https:\/\/huggingface.co\/transformers\/model_doc\/beit.html).\r\n\r\nSemantic segmentation (which is the task of labeling every pixel of an image with a corresponding class) is typically evaluated using the Mean Intersection and Union (mIoU). 
Together with the upcoming Image Feature, adding this metric could be very handy when creating example scripts to fine-tune any Transformer-based model on a semantic segmentation dataset.\r\n\r\nAn implementation can be found [here](https:\/\/github.com\/open-mmlab\/mmsegmentation\/blob\/504965184c3e6bc9ec43af54237129ef21981a5f\/mmseg\/core\/evaluation\/metrics.py#L132) for instance.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3202\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3201","id":1043209142,"node_id":"I_kwDODunzps4-Lhu2","number":3201,"title":"Add GSM8K dataset","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-03T08:36:44Z","updated_at":"2021-11-03T08:36:44Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** GSM8K (short for Grade School Math 8k)\r\n- **Description:** GSM8K is a dataset of 8.5K high quality linguistically diverse grade school math word problems created by human problem writers.\r\n- **Paper:** https:\/\/openai.com\/blog\/grade-school-math\/\r\n- **Data:** https:\/\/github.com\/openai\/grade-school-math\r\n- **Motivation:** The dataset is useful to investigate the reasoning abilities of large Transformer models, such as GPT-3.\r\n\r\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3201\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3200","id":1042887291,"node_id":"PR_kwDODunzps4uAZLu","number":3200,"title":"Catch token invalid error in CI","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-02T21:56:26Z","updated_at":"2021-11-03T09:41:08Z","closed_at":"2021-11-03T09:41:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3200","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3200","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3200.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3200.patch","merged_at":"2021-11-03T09:41:08Z"},"body":"The staging back end sometimes returns invalid token errors when trying to delete a repo.\r\nI modified the fixture in the test that uses staging to ignore this error","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3200\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3199","id":1042860935,"node_id":"PR_kwDODunzps4uAVzQ","number":3199,"title":"Bump huggingface_hub","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-02T21:29:10Z","updated_at":"2021-11-14T01:48:11Z","closed_at":"2021-11-02T21:41:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3199","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3199","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3199.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3199.patch","merged_at":"2021-11-02T21:41:40Z"},"body":"huggingface_hub just released its first minor version, so we need to update the dependency\r\n\r\nIt was supposed to be part of 1.15.0 but I'm adding it for 1.15.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3199\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3198","id":1042679548,"node_id":"PR_kwDODunzps4t_5G8","number":3198,"title":"Add Multi-Lingual 
LibriSpeech","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-02T18:23:59Z","updated_at":"2021-11-04T17:09:22Z","closed_at":"2021-11-04T17:09:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3198","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3198","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3198.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3198.patch","merged_at":"2021-11-04T17:09:22Z"},"body":"Add https:\/\/www.openslr.org\/94\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3198\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3197","id":1042541127,"node_id":"PR_kwDODunzps4t_cry","number":3197,"title":"Fix optimized encoding for 
arrays","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-02T15:55:53Z","updated_at":"2021-11-02T19:12:24Z","closed_at":"2021-11-02T19:12:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3197","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3197","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3197.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3197.patch","merged_at":"2021-11-02T19:12:23Z"},"body":"Hi !\r\n\r\n#3124 introduced a regression that made the benchmarks CI fail because of a bad array comparison when checking the first encoded element. This PR fixes this by making sure that encoding is applied on all sequence types except lists.\r\n\r\ncc @eladsegal fyi (no big deal)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3197\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3196","id":1042223913,"node_id":"PR_kwDODunzps4t-bxy","number":3196,"title":"QOL improvements: auto-flatten_indices and desc in map 
calls","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-02T11:28:50Z","updated_at":"2021-11-02T15:41:09Z","closed_at":"2021-11-02T15:41:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3196","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3196","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3196.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3196.patch","merged_at":"2021-11-02T15:41:08Z"},"body":"This PR:\r\n* automatically calls `flatten_indices` where needed: in `unique` and `save_to_disk` to avoid saving the indices file\r\n* adds descriptions to the map calls\r\n\r\nFix #3040 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3196\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3195","id":1042204044,"node_id":"PR_kwDODunzps4t-ZR0","number":3195,"title":"More robust `None` 
handling","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-11-02T11:15:10Z","updated_at":"2021-12-09T14:27:00Z","closed_at":"2021-12-09T14:26:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3195","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3195","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3195.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3195.patch","merged_at":"2021-12-09T14:26:57Z"},"body":"PyArrow has explicit support for `null` values, so it makes sense to support Nones on our side as well.\r\n\r\n[Colab Notebook with examples](https:\/\/colab.research.google.com\/drive\/1zcK8BnZYnRe3Ao2271u1T19ag9zLEiy3?usp=sharing)\r\n\r\nChanges:\r\n* allow None for the features types with special encoding (`ClassLabel, TranslationVariableLanguages, Value, _ArrayXD`)\r\n* handle None in `class_encode_column` (also there is an option to stringify Nones and treat them as a class)\r\n* support None sorting in `sort` (use pandas for that) \r\n* handle None in align_labels_with_mapping\r\n* support for None in ArrayXD (converts `None` to `np.nan` to align the behavior with PyArrow)\r\n* support for None in the Audio\/Image feature\r\n* allow promotion when concatenating tables (`pa.concat_tables(table_list, promote=True)`) and `null` row\/~~column~~ broadcasting similar to pandas \r\n\r\nAdditional notes:\r\n* use `null` instead of `none` for function arguments for consistency with existing `disable_nullable` \r\n* fixes a bug with the `update_metadata_with_features` call in `Dataset.rename_columns`\r\n* had to update some tests, let me know if that's ok\r\n\r\nTODO:\r\n- [x] check how the Audio features behaves with Nones\r\n- [x] Better None handling in `concatenate_datasets`\/`add_item`\r\n- [x] Fix formatting with Nones\r\n- [x] Add Colab with examples\r\n- [x] Tests\r\n\r\nTODOs for subsequent PRs:\r\n- Mention None handling in the docs\r\n- Add `drop_null`\/`fill_null` to `Dataset`\/`DatasetDict`\r\n\r\nFix #3181 #3253","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3195\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3194","id":1041999535,"node_id":"PR_kwDODunzps4t91Eg","number":3194,"title":"Update link to Datasets Tagging app in Spaces","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-02T08:13:50Z","updated_at":"2021-11-08T10:36:23Z","closed_at":"2021-11-08T10:36:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3194","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3194","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3194.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3194.patch","merged_at":"2021-11-08T10:36:22Z"},"body":"Fix #3193.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3194\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3193","id":1041971117,"node_id":"I_kwDODunzps4-Gzet","number":3193,"title":"Update link to datasets-tagging 
app","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-11-02T07:39:59Z","updated_at":"2021-11-08T10:36:22Z","closed_at":"2021-11-08T10:36:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Once datasets-tagging has been transferred to Spaces:\r\n- 
huggingface\/datasets-tagging#22\r\n\r\nWe should update the link in Datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3193\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3192","id":1041308086,"node_id":"I_kwDODunzps4-ERm2","number":3192,"title":"Multiprocessing filter\/map (tests) not working on Windows","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-01T15:36:08Z","updated_at":"2021-11-01T15:57:03Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"While running the tests, I found that the multiprocessing examples fail on Windows, or rather they do not complete: they cause a deadlock. I haven't dug deep into it, but they do not seem to work as-is. 
I currently have no time to tests this in detail but at least the tests seem not to run correctly (deadlocking).\r\n\r\n## Steps to reproduce the bug\r\n```shell\r\npytest tests\/test_arrow_dataset.py -k \"test_filter_multiprocessing\"\r\npytest tests\/test_arrow_dataset.py -k \"test_map_multiprocessing\"\r\n```\r\n\r\n## Expected results\r\nThe functionality to work on all platforms.\r\n\r\n## Actual results\r\nDeadlock.\r\n\r\n## Environment info\r\n- `datasets` version: 1.14.1.dev0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.9.2, also tested with 3.7.9\r\n- PyArrow version: 4.0.1\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3192\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3191","id":1041225111,"node_id":"I_kwDODunzps4-D9WX","number":3191,"title":"Dataset viewer issue for '*compguesswhat*'","user":{"login":"benotti","id":2545336,"node_id":"MDQ6VXNlcjI1NDUzMzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2545336?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/benotti","html_url":"https:\/\/github.com\/benotti","followers_url":"https:\/\/api.github.com\/users\/benotti\/followers","following_url":"https:\/\/api.github.com\/users\/benotti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/benotti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/benotti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/benotti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/benotti\/orgs","repos_url":"https:\/\/api.github.com\/users\/benotti\/repos","events_url":"https:\/\/api.github.com\/users\/benotti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/benotti\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-11-01T14:16:49Z","updated_at":"2021-11-01T14:16:49Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*compguesswhat*'\r\n\r\n**Link:** https:\/\/huggingface.co\/datasets\/compguesswhat\r\n\r\nFile not found\r\n\r\nAm I the one who added this dataset ? 
No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3191\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3190","id":1041153631,"node_id":"I_kwDODunzps4-Dr5f","number":3190,"title":"combination of shuffle and filter results in a bug","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-11-01T13:07:29Z","updated_at":"2021-11-02T10:50:49Z","closed_at":"2021-11-02T10:50:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nHi,\r\nI would like to shuffle a dataset, then filter it based on each existing label. however, the combination of `filter`, `shuffle` seems to results in a bug. In the minimal example below, as you see in the filtered results, the filtered labels are not unique, meaning filter has not worked. 
Any suggestions as a temporary fix is appreciated @lhoestq.\r\n\r\nThanks.\r\n Best regards\r\nRabeeh \r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport numpy as np\r\nimport datasets \r\n\r\ndatasets = datasets.load_dataset('super_glue', 'rte', script_version=\"master\")\r\nshuffled_data = datasets[\"train\"].shuffle(seed=42)\r\nfor label in range(2):\r\n print(\"label \", label)\r\n data = shuffled_data.filter(lambda example: int(example['label']) == label)\r\n print(\"length \", len(data), np.unique(data['label']))\r\n```\r\n\r\n## Expected results\r\nFiltering per label, should only return the data with that specific label.\r\n\r\n## Actual results\r\nAs you can see, filtered data per label, has still two labels of [0, 1]\r\n```\r\nlabel 0\r\nlength 1249 [0 1]\r\nlabel 1\r\nlength 1241 [0 1]\r\n```\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1 \r\n- Platform: linux \r\n- Python version: 3.7.11 \r\n- PyArrow version: 5.0.0 \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3190\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3189","id":1041044986,"node_id":"I_kwDODunzps4-DRX6","number":3189,"title":"conll2003 incorrect label explanation","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-11-01T11:03:30Z","updated_at":"2021-11-09T10:40:58Z","closed_at":"2021-11-09T10:40:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In the [conll2003](https:\/\/huggingface.co\/datasets\/conll2003#data-fields) README, the labels are described as 
follows\r\n\r\n> - `id`: a `string` feature.\r\n> - `tokens`: a `list` of `string` features.\r\n> - `pos_tags`: a `list` of classification labels, with possible values including `\"` (0), `''` (1), `#` (2), `$` (3), `(` (4).\r\n> - `chunk_tags`: a `list` of classification labels, with possible values including `O` (0), `B-ADJP` (1), `I-ADJP` (2), `B-ADVP` (3), `I-ADVP` (4).\r\n> - `ner_tags`: a `list` of classification labels, with possible values including `O` (0), `B-PER` (1), `I-PER` (2), `B-ORG` (3), `I-ORG` (4) `B-LOC` (5), `I-LOC` (6) `B-MISC` (7), `I-MISC` (8).\r\n\r\nFirst of all, it would be great if we can get a list of ALL possible pos_tags.\r\n\r\nSecond, the chunk tags labels cannot be correct. The description says the values go from 0 to 4 whereas the data shows values from at least 11 to 21 and 0.\r\n\r\nEDIT: not really a bug, sorry for mistagging.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3189\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3188","id":1040980712,"node_id":"I_kwDODunzps4-DBro","number":3188,"title":"conll2002 issues","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-11-01T09:49:24Z","updated_at":"2021-11-15T13:50:59Z","closed_at":"2021-11-12T17:18:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Link:** https:\/\/huggingface.co\/datasets\/conll2002\r\n\r\nThe dataset viewer throws a server error when trying to preview the dataset. 
\r\n\r\n```\r\nMessage: Extraction protocol 'train' for file at 'https:\/\/raw.githubusercontent.com\/teropa\/nlp\/master\/resources\/corpora\/conll2002\/esp.train' is not implemented yet\r\n```\r\n\r\nIn addition, the \"point of contact\" has encoding issues and does not work when clicked.\r\n\r\nAm I the one who added this dataset ? No, @lhoestq did","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3188\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3187","id":1040412869,"node_id":"PR_kwDODunzps4t44Ab","number":3187,"title":"Add ChrF(++) (as implemented in sacrebleu)","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-31T08:53:58Z","updated_at":"2021-11-02T14:50:50Z","closed_at":"2021-11-02T14:31:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3187","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3187","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3187.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3187.patch","merged_at":"2021-11-02T14:31:26Z"},"body":"Similar to my [PR for TER](https:\/\/github.com\/huggingface\/datasets\/pull\/3153), it feels only right to also include ChrF and friends. These are present in Sacrebleu and are therefore very similar to implement as TER and sacrebleu. I tested the implementation with sacrebleu's tests to verify. 
You can try this below for yourself\r\n\r\n```python\r\nimport datasets\r\n\r\n\r\nEPSILON = 1e-4\r\nchrf = datasets.load_metric(r\"path\\to\\datasets\\metrics\\chrf\")\r\n\r\ntest_cases = [\r\n ([\"abcdefg\"], [\"hijklmnop\"], 0.0),\r\n ([\"a\"], [\"b\"], 0.0),\r\n ([\"\"], [\"b\"], 0.0),\r\n ([\"\"], [\"ref\"], 0.0),\r\n ([\"\"], [\"reference\"], 0.0),\r\n ([\"aa\"], [\"ab\"], 8.3333),\r\n ([\"a\", \"b\"], [\"a\", \"c\"], 8.3333),\r\n ([\"a\"], [\"a\"], 16.6667),\r\n ([\"a b c\"], [\"a b c\"], 50.0),\r\n ([\"a b c\"], [\"abc\"], 50.0),\r\n ([\" risk assessment must be made of those who are qualified and expertise in the sector - these are the scientists .\"],\r\n [\"risk assessment has to be undertaken by those who are qualified and expert in that area - that is the scientists .\"], 63.361730),\r\n ([\" Die Beziehung zwischen Obama und Netanjahu ist nicht gerade freundlich. \"],\r\n [\"Das Verh\u00e4ltnis zwischen Obama und Netanyahu ist nicht gerade freundschaftlich.\"], 64.1302698),\r\n ([\"Niemand hat die Absicht, eine Mauer zu errichten\"], [\"Niemand hat die Absicht, eine Mauer zu errichten\"], 100.0),\r\n]\r\n\r\nfor hyp, ref, score in test_cases:\r\n # Note the reference transformation which is different from scarebleu's input format\r\n results = chrf.compute(predictions=hyp, references=[[r] for r in ref],\r\n char_order=6, word_order=0, beta=3, eps_smoothing=True)\r\n if abs(score - results[\"score\"]) > EPSILON:\r\n print(f\"expected {score}, got {results['score']} for {hyp} - {ref}\")\r\n\r\ntest_cases_effective_order = [\r\n ([\"a\"], [\"a\"], 100.0),\r\n ([\"\"], [\"reference\"], 0.0),\r\n ([\"a b c\"], [\"a b c\"], 100.0),\r\n ([\"a b c\"], [\"abc\"], 100.0),\r\n ([\"\"], [\"c\"], 0.0),\r\n ([\"a\", \"b\"], [\"a\", \"c\"], 50.0),\r\n ([\"aa\"], [\"ab\"], 25.0),\r\n]\r\n\r\nfor hyp, ref, score in test_cases_effective_order:\r\n # Note the reference transformation which is different from scarebleu's input format\r\n results = chrf.compute(predictions=hyp, references=[[r] for r in ref],\r\n char_order=6, word_order=0, beta=3, eps_smoothing=False)\r\n if abs(score - results[\"score\"]) > EPSILON:\r\n print(f\"expected {score}, got {results['score']} for {hyp} - {ref}\")\r\n\r\ntest_cases_keep_whitespace = [\r\n (\r\n [\"Die Beziehung zwischen Obama und Netanjahu ist nicht gerade freundlich.\"],\r\n [\"Das Verh\u00e4ltnis zwischen Obama und Netanyahu ist nicht gerade freundschaftlich.\"],\r\n 67.3481606,\r\n ),\r\n (\r\n [\"risk assessment must be made of those who are qualified and expertise in the sector - these are the scientists .\"],\r\n [\"risk assessment has to be undertaken by those who are qualified and expert in that area - that is the scientists .\"],\r\n 65.2414427,\r\n ),\r\n]\r\n\r\nfor hyp, ref, score in test_cases_keep_whitespace:\r\n # Note the reference transformation which is different from scarebleu's input format\r\n results = chrf.compute(predictions=hyp, references=[[r] for r in ref],\r\n char_order=6, word_order=0, beta=3,\r\n whitespace=True)\r\n if abs(score - results[\"score\"]) > EPSILON:\r\n print(f\"expected {score}, got {results['score']} for {hyp} - {ref}\")\r\n\r\n\r\npredictions = [\"The relationship between Obama and Netanyahu is not exactly friendly.\"]\r\nreferences = [[\"The ties between Obama and Netanyahu are not particularly friendly.\"]]\r\nprint(chrf.compute(predictions=predictions, 
references=references))\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3187\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3186","id":1040369397,"node_id":"I_kwDODunzps4-Asb1","number":3186,"title":"Dataset viewer for nli_tr","user":{"login":"e-budur","id":2246791,"node_id":"MDQ6VXNlcjIyNDY3OTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2246791?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/e-budur","html_url":"https:\/\/github.com\/e-budur","followers_url":"https:\/\/api.github.com\/users\/e-budur\/followers","following_url":"https:\/\/api.github.com\/users\/e-budur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/e-budur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/e-budur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/e-budur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/e-budur\/orgs","repos_url":"https:\/\/api.github.com\/users\/e-budur\/repos","events_url":"https:\/\/api.github.com\/users\/e-budur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/e-budur\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-31T03:56:33Z","updated_at":"2021-10-31T03:56:33Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for '*nli_tr*'\r\n\r\n**Link:** https:\/\/huggingface.co\/datasets\/nli_tr\r\n\r\nHello,\r\n\r\nThank you for the new dataset preview feature that will help the users to view the datasets online.\r\n\r\nWe just noticed that the dataset viewer widget in the `nli_tr` dataset shows the error below. The error must be due to a temporary problem that may have blocked access to the dataset through the dataset viewer. But the dataset is currently accessible through the link in the error message. May we kindly ask if it would be possible to rerun the job so that it can access the dataset for the dataset viewer function?\r\n\r\nThank you.\r\nEmrah\r\n\r\n\r\n------------------------------------------\r\nServer Error\r\nStatus code: 404\r\nException: FileNotFoundError\r\nMessage: [Errno 2] No such file or directory: 'zip:\/\/snli_tr_1.0_train.jsonl::https:\/\/tabilab.cmpe.boun.edu.tr\/datasets\/nli_datasets\/snli_tr_1.0.zip\r\n------------------------------------------\r\n\r\nAm I the one who added this dataset ? 
Yes\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3186\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3185","id":1040291961,"node_id":"I_kwDODunzps4-AZh5","number":3185,"title":"7z dataset preview not implemented?","user":{"login":"Kirili4ik","id":30757466,"node_id":"MDQ6VXNlcjMwNzU3NDY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30757466?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Kirili4ik","html_url":"https:\/\/github.com\/Kirili4ik","followers_url":"https:\/\/api.github.com\/users\/Kirili4ik\/followers","following_url":"https:\/\/api.github.com\/users\/Kirili4ik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Kirili4ik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Kirili4ik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Kirili4ik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Kirili4ik\/orgs","repos_url":"https:\/\/api.github.com\/users\/Kirili4ik\/repos","events_url":"https:\/\/api.github.com\/users\/Kirili4ik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Kirili4ik\/received_events","type":"User","site_admin":false},"labels":[{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-30T20:18:27Z","updated_at":"2021-10-30T20:18:27Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Dataset viewer issue for dataset 'samsum'\r\n\r\n**Link:** https:\/\/huggingface.co\/datasets\/samsum\r\n\r\nServer Error\r\nStatus code: 400\r\nException: NotImplementedError\r\nMessage: Extraction protocol '7z' for file at 'https:\/\/arxiv.org\/src\/1911.12237v2\/anc\/corpus.7z' is not implemented yet\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3185\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3184","id":1040114102,"node_id":"PR_kwDODunzps4t4J61","number":3184,"title":"RONEC v2","user":{"login":"dumitrescustefan","id":22746816,"node_id":"MDQ6VXNlcjIyNzQ2ODE2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22746816?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dumitrescustefan","html_url":"https:\/\/github.com\/dumitrescustefan","followers_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/followers","following_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/orgs","repos_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/repos","events_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dumitrescustefan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-30T10:50:03Z","updated_at":"2021-11-02T16:02:23Z","closed_at":"2021-11-02T16:02:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3184","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3184","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3184.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3184.patch","merged_at":"2021-11-02T16:02:22Z"},"body":"Hi, as we've recently finished with the new RONEC (Romanian Named Entity Corpus), we'd like to update the dataset here as well. It's actually essential as links to V1 are no longer valid. \r\n\r\nIn reality we'd like to replace completely v1, as v2 is a full re-annotation of v1 with additional data (up to 2x size vs v1).\r\n\r\nI've run the make style and all the dummy and real data test, and they passed.\r\n\r\nI hope it's okay to merge the new RONEC v2 in the datasets. 
\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3184\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3183","id":1039761120,"node_id":"PR_kwDODunzps4t3Dag","number":3183,"title":"Add missing docstring to DownloadConfig","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-29T16:56:35Z","updated_at":"2021-11-02T10:25:38Z","closed_at":"2021-11-02T10:25:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3183","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3183","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3183.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3183.patch","merged_at":"2021-11-02T10:25:37Z"},"body":"Document the `use_etag` and `num_proc` attributes in `DownloadConig`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3183\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3182","id":1039739606,"node_id":"PR_kwDODunzps4t2-9J","number":3182,"title":"Don't memoize strings when hashing since two identical strings may have different python ids","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-29T16:26:17Z","updated_at":"2021-11-02T09:35:38Z","closed_at":"2021-11-02T09:35:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3182","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3182","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3182.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3182.patch","merged_at":"2021-11-02T09:35:37Z"},"body":"When hashing an object that has several times the same string, the hashing could return a different hash if the identical strings share the same python `id()` or not.\r\n\r\nHere is an example code that shows how the issue can affect the caching:\r\n```python\r\nimport json\r\nimport pyarrow as pa\r\nfrom datasets.features import Features\r\nfrom datasets.fingerprint import Hasher\r\n\r\nschema = pa.schema([pa.field(\"some_string\", pa.string()), pa.field(\"another_string\", pa.string())])\r\nfeatures_from_schema = Features.from_arrow_schema(schema)\r\nHasher.hash(features_from_schema) # dffa9dca9a73fd8c\r\n\r\nfeatures_dict = json.loads('{\"some_string\": {\"dtype\": \"string\", \"id\": null, \"_type\": \"Value\"}, \"another_string\": {\"dtype\": \"string\", \"id\": null, \"_type\": \"Value\"}}')\r\nfeatures_from_json = Features.from_dict(features_dict)\r\nHasher.hash(features_from_json) # 3812e76b15e6420e\r\n\r\nfeatures_from_schema == features_from_json # True\r\n```\r\n\r\nThis is because in `features_dict`, some strings like \"dtype\" are repeated but don't share the same id, contrary to the ones in `features_from_schema`.\r\n\r\nI fixed that by disabling memoization for strings.\r\n\r\nThis could be optimized in the future by implementing a smarter 
memoization with a special handling for strings.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3182\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3181","id":1039682097,"node_id":"I_kwDODunzps49-Eox","number":3181,"title":"`None` converted to `\"None\"` when loading a dataset","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":9,"created_at":"2021-10-29T15:23:53Z","updated_at":"2021-12-11T01:16:40Z","closed_at":"2021-12-09T14:26:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen loading a dataset `None` values of the type `NoneType` are converted to `'None'` of the type `str`.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nqasper = load_dataset(\"qasper\", split=\"train\", download_mode=\"reuse_cache_if_exists\")\r\nprint(qasper[60][\"full_text\"][\"section_name\"])\r\n```\r\n\r\nWhen installing version 1.1.40, the output is\r\n`[None, 'Introduction', 'Benchmark Datasets', ...]`\r\n\r\nWhen installing from the master branch, the output is\r\n`['None', 'Introduction', 'Benchmark Datasets', ...]`\r\n\r\nNotice how the first element was changed from `NoneType` to `str`.\r\n\r\n## Expected results\r\n`None` should stay as is.\r\n\r\n## Actual results\r\n`None` is converted to a string.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: master\r\n- Platform: Linux-4.4.0-19041-Microsoft-x86_64-with-glibc2.17\r\n- Python version: 3.8.10\r\n- PyArrow version: 
4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3181\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3180","id":1039641316,"node_id":"PR_kwDODunzps4t2qQn","number":3180,"title":"fix label mapping","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-10-29T14:42:24Z","updated_at":"2021-11-02T13:41:07Z","closed_at":"2021-11-02T10:37:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3180","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3180","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3180.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3180.patch","merged_at":"2021-11-02T10:37:12Z"},"body":"Fixing label mapping for hlgd.\r\n0 correponds to same event and 1 corresponds to different event\r\n\"Capture\r\n\"Capture\r\nnt","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3180\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3179","id":1039571928,"node_id":"I_kwDODunzps499pvY","number":3179,"title":"Cannot load dataset when the config name is \"special\"","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-29T13:30:47Z","updated_at":"2021-10-29T13:35:21Z","closed_at":"2021-10-29T13:35:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nAfter https:\/\/github.com\/huggingface\/datasets\/pull\/3159, we can get the config name of \"Check\/region_1\", which is \"Check___region_1\".\r\n\r\nBut now we cannot load the dataset (not sure it's related to the above PR though). 
It's the case for all the similar datasets, listed in https:\/\/github.com\/huggingface\/datasets-preview-backend\/issues\/78\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n>>> from datasets import get_dataset_config_names\r\n>>> get_dataset_config_names(\"Check\/region_1\")\r\n['Check___region_1']\r\n>>> load_dataset(\"Check\/region_1\")\r\nUsing custom data configuration Check___region_1-d2b3bc48f11c9be2\r\nDownloading and preparing dataset json\/Check___region_1 to \/home\/slesage\/.cache\/huggingface\/datasets\/json\/Check___region_1-d2b3bc48f11c9be2\/0.0.0\/c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426...\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 4443.12it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 
1277.19it\/s]\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 697, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 1159, in _prepare_split\r\n writer.write_table(table)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/arrow_writer.py\", line 442, in write_table\r\n pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.9\/site-packages\/datasets\/arrow_writer.py\", line 442, in \r\n pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n File \"pyarrow\/table.pxi\", line 1249, in pyarrow.lib.Table.__getitem__\r\n File \"pyarrow\/table.pxi\", line 1825, in pyarrow.lib.Table.column\r\n File \"pyarrow\/table.pxi\", line 1800, in pyarrow.lib.Table._ensure_integer_index\r\nKeyError: 'Field \"builder_name\" does not exist in table schema'\r\n```\r\n\r\nLoading in streaming mode also returns something strange:\r\n\r\n```python\r\n>>> list(load_dataset(\"Check\/region_1\", streaming=True, split=\"train\"))\r\nUsing custom data configuration Check___region_1-d2b3bc48f11c9be2\r\n[{'builder_name': None, 'citation': '', 'config_name': None, 'dataset_size': None, 'description': '', 'download_checksums': None, 'download_size': None, 'features': {'speech': {'feature': {'dtype': 'float64', 'id': None, '_type': 'Value'}, 'length': -1, 'id': None, '_type': 'Sequence'}, 'sampling_rate': {'dtype': 'int64', 'id': None, '_type': 'Value'}, 'label': {'dtype': 'string', 'id': None, '_type': 'Value'}}, 'homepage': '', 'license': '', 'post_processed': None, 'post_processing_size': None, 'size_in_bytes': None, 'splits': None, 'supervised_keys': None, 'task_templates': None, 'version': None}, {'_data_files': [{'filename': 'dataset.arrow'}], '_fingerprint': 'f1702bb5533c549c', '_format_columns': ['speech', 'sampling_rate', 'label'], '_format_kwargs': {}, '_format_type': None, '_indexes': {}, '_indices_data_files': None, '_output_all_columns': False, '_split': None}]\r\n```\r\n\r\n## Expected results\r\n\r\nThe dataset should be loaded\r\n\r\n## Actual results\r\n\r\nAn error occurs\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.1.dev0\r\n- Platform: Linux-5.11.0-1020-aws-x86_64-with-glibc2.31\r\n- Python version: 3.9.6\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3179\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3178","id":1039539076,"node_id":"I_kwDODunzps499huE","number":3178,"title":"\"Property couldn't be hashed properly\" even though fully picklable","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2021-10-29T12:56:09Z","updated_at":"2021-11-18T09:33:31Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to tokenize a dataset with spaCy. I found that no matter what I do, the spaCy language object (`nlp`) prevents `datasets` from pickling correctly - or so the warning says - even though manually pickling is no issue. It should not be an issue either, since spaCy objects are picklable.\r\n\r\n## Steps to reproduce the bug\r\n\r\nHere is a [colab](https:\/\/colab.research.google.com\/drive\/1gt75LCBIzsmBMvvipEOvWulvyZseBiA7?usp=sharing) but for some reason I cannot reproduce it there. That may have to do with logging\/tqdm on Colab, or with running things in notebooks. 
I tried below code on Windows and Ubuntu as a Python script and getting the same issue (warning below).\r\n\r\n```python\r\nimport pickle\r\n\r\nfrom datasets import load_dataset\r\nimport spacy\r\n\r\n\r\nclass Processor:\r\n def __init__(self):\r\n self.nlp = spacy.load(\"en_core_web_sm\", disable=[\"tagger\", \"parser\", \"ner\", \"lemmatizer\"])\r\n\r\n @staticmethod\r\n def collate(batch):\r\n return [d[\"en\"] for d in batch]\r\n\r\n def parse(self, batch):\r\n batch = batch[\"translation\"]\r\n return {\"translation_tok\": [{\"en_tok\": \" \".join([t.text for t in doc])} for doc in self.nlp.pipe(self.collate(batch))]}\r\n\r\n def process(self):\r\n ds = load_dataset(\"wmt16\", \"de-en\", split=\"train[:10%]\")\r\n ds = ds.map(self.parse, batched=True, num_proc=6)\r\n\r\n\r\nif __name__ == '__main__':\r\n pr = Processor()\r\n\r\n # succeeds\r\n with open(\"temp.pkl\", \"wb\") as f:\r\n pickle.dump(pr, f)\r\n print(\"Successfully pickled!\")\r\n\r\n pr.process()\r\n\r\n```\r\n\r\n---\r\n\r\nHere is a small change that includes `Hasher.hash` that shows that the hasher cannot seem to successfully pickle parts form the NLP object.\r\n\r\n```python\r\n\r\nfrom datasets.fingerprint import Hasher\r\nimport pickle\r\n\r\nfrom datasets import load_dataset\r\nimport spacy\r\n\r\n\r\nclass Processor:\r\n def __init__(self):\r\n self.nlp = spacy.load(\"en_core_web_sm\", disable=[\"tagger\", \"parser\", \"ner\", \"lemmatizer\"])\r\n\r\n @staticmethod\r\n def collate(batch):\r\n return [d[\"en\"] for d in batch]\r\n\r\n def parse(self, batch):\r\n batch = batch[\"translation\"]\r\n return {\"translation_tok\": [{\"en_tok\": \" \".join([t.text for t in doc])} for doc in self.nlp.pipe(self.collate(batch))]}\r\n\r\n def process(self):\r\n ds = load_dataset(\"wmt16\", \"de-en\", split=\"train[:10]\")\r\n return ds.map(self.parse, batched=True)\r\n\r\n\r\nif __name__ == '__main__':\r\n pr = Processor()\r\n\r\n # succeeds\r\n with open(\"temp.pkl\", \"wb\") as f:\r\n pickle.dump(pr, f)\r\n print(\"Successfully pickled class instance!\")\r\n\r\n # succeeds\r\n with open(\"temp.pkl\", \"wb\") as f:\r\n pickle.dump(pr.nlp, f)\r\n print(\"Successfully pickled nlp!\")\r\n\r\n # fails\r\n print(Hasher.hash(pr.nlp))\r\n pr.process()\r\n```\r\n\r\n## Expected results\r\nThis to be picklable, working (fingerprinted), and no warning.\r\n\r\n## Actual results\r\nIn the first snippet, I get this warning \r\nParameter 'function'= of the transform datasets.arrow_dataset.Dataset._map_single couldn't be hashed properly, a random hash was used instead. Make sure your transforms and parameters are serializable with pickle or dill for the dataset fingerprinting and caching to work. If you reuse this transform, the caching mechanism will consider it to be different from the previous calls and recompute everything. This warning is only showed once. 
Subsequent hashing failures won't be showed.\r\n\r\nIn the second, I get this traceback which directs to the `Hasher.hash` line.\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 918, in save_global\r\n obj2, parent = _getattribute(module, name)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 266, in _getattribute\r\n .format(name, obj))\r\nAttributeError: Can't get local attribute 'add_codes..ErrorsWithCodes' on \r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \" scratch_4.py\", line 40, in \r\n print(Hasher.hash(pr.nlp))\r\n File \" \\lib\\site-packages\\datasets\\fingerprint.py\", line 191, in hash\r\n return cls.hash_default(value)\r\n File \" \\lib\\site-packages\\datasets\\fingerprint.py\", line 184, in hash_default\r\n return cls.hash_bytes(dumps(value))\r\n File \" \\lib\\site-packages\\datasets\\utils\\py_utils.py\", line 345, in dumps\r\n dump(obj, file)\r\n File \" \\lib\\site-packages\\datasets\\utils\\py_utils.py\", line 320, in dump\r\n Pickler(file, recurse=True).dump(obj)\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", line 498, in dump\r\n StockPickler.dump(self, obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 409, in dump\r\n self.save(obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 521, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 634, in save_reduce\r\n save(state)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", line 990, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 821, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 847, in _batch_setitems\r\n save(v)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 781, in save_list\r\n self._batch_appends(obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 805, in _batch_appends\r\n save(x)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 736, in save_tuple\r\n save(element)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 521, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 634, in save_reduce\r\n save(state)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 736, in save_tuple\r\n save(element)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", line 990, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 821, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 847, in _batch_setitems\r\n save(v)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", 
line 1176, in save_instancemethod0\r\n pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 610, in save_reduce\r\n save(args)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 736, in save_tuple\r\n save(element)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\datasets\\utils\\py_utils.py\", line 523, in save_function\r\n obj=obj,\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 610, in save_reduce\r\n save(args)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 751, in save_tuple\r\n save(element)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", line 990, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 821, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 847, in _batch_setitems\r\n save(v)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 521, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 605, in save_reduce\r\n save(cls)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \" \\lib\\site-packages\\dill\\_dill.py\", line 1439, in save_type\r\n StockPickler.save_global(pickler, obj, name=name)\r\n File \" \\Python\\Python36\\lib\\pickle.py\", line 922, in save_global\r\n (obj, module_name, name))\r\n_pickle.PicklingError: Can't pickle .ErrorsWithCodes'>: it's not found as spacy.errors.add_codes..ErrorsWithCodes\r\n```\r\n\r\n## Environment info\r\nTried on both Linux and Windows\r\n\r\n- `datasets` version: 1.14.0\r\n- Platform: Windows-10-10.0.19041-SP0 + Python 3.7.9; Linux-5.11.0-38-generic-x86_64-with-Ubuntu-20.04-focal + Python 3.7.12\r\n- PyArrow version: 6.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3178\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3177","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3177\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3177\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3177\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3177","id":1039487780,"node_id":"I_kwDODunzps499VMk","number":3177,"title":"More control over TQDM when using map\/filter with multiple 
processes","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-29T11:56:16Z","updated_at":"2021-11-02T11:38:50Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It would help with the clutter in my terminal if tqdm is only shown for rank 0 when using `num_proces>0` in the map and filter methods of datasets.\r\n\r\n```python\r\ndataset.map(lambda examples: tokenize(examples[\"text\"]), batched=True, num_proc=6)\r\n```\r\n\r\nThe above snippet leads to a lot of TQDM bars and depending on your terminal, these will not overwrite but keep pushing each other down.\r\n\r\n```\r\n #0: 0%| | 0\/13 [00:001`","user":{"login":"vlievin","id":9859840,"node_id":"MDQ6VXNlcjk4NTk4NDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9859840?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vlievin","html_url":"https:\/\/github.com\/vlievin","followers_url":"https:\/\/api.github.com\/users\/vlievin\/followers","following_url":"https:\/\/api.github.com\/users\/vlievin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vlievin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vlievin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vlievin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vlievin\/orgs","repos_url":"https:\/\/api.github.com\/users\/vlievin\/repos","events_url":"https:\/\/api.github.com\/users\/vlievin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vlievin\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-10-28T10:29:00Z","updated_at":"2022-02-11T01:31:05Z","closed_at":"2021-11-03T11:26:10Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI use `datasets.map` to preprocess some data in my application. 
The error `SystemError 15` is thrown at the end of the execution of `Dataset.map()` (only with `num_proc>1`. Traceback included bellow. \r\n \r\nThe exception is raised only when the code runs within a specific context. Despite ~10h spent investigating this issue, I have failed to isolate the bug, so let me describe my setup. \r\n\r\nIn my project, `Dataset` is wrapped into a `LightningDataModule` and the data is preprocessed when calling `LightningDataModule.setup()`. Calling `.setup()` in an isolated script works fine (even when wrapped with `hydra.main()`). However, when calling `.setup()` within the experiment script (depends on `pytorch_lightning`), the script crashes and `SystemError 15`.\r\n\r\nI could avoid throwing this error by modifying ` Dataset.__del__()` (see bellow), but I believe this only moves the problem somewhere else. I am completely stuck with this issue, any hint would be welcome. \r\n\r\n```python\r\nclass Dataset()\r\n ...\r\n def __del__(self):\r\n if hasattr(self, \"_data\"):\r\n _ = self._data # <- ugly trick that allows avoiding the issue.\r\n del self._data\r\n if hasattr(self, \"_indices\"):\r\n del self._indices\r\n```\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Unfortunately I couldn't isolate the bug.\r\n```\r\n\r\n## Expected results\r\nCalling `Dataset.map()` without throwing an exception. Or at least raising a more detailed exception\/traceback.\r\n\r\n## Actual results\r\n```\r\nException ignored in: \u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 5\/5 [00:05<00:00, 1.17ba\/s]\r\nTraceback (most recent call last):\r\n File \"...\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 906, in __del__\r\n del self._data\r\n File \"...\/python3.8\/site-packages\/ray\/worker.py\", line 1033, in sigterm_handler\r\n sys.exit(signum)\r\nSystemExit: 15\r\n\r\n```\r\n\r\n## Environment info\r\n\r\nTested on 2 environments:\r\n\r\n**Environment 1.**\r\n- `datasets` version: 1.14.0\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.8\r\n- PyArrow version: 6.0.0\r\n\r\n**Environment 2.**\r\n- `datasets` version: 1.14.0\r\n- Platform: Linux-4.18.0-305.19.1.el8_4.x86_64-x86_64-with-glibc2.28\r\n- Python version: 3.9.7\r\n- PyArrow version: 6.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3172\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3172\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3171","id":1037728059,"node_id":"I_kwDODunzps492nk7","number":3171,"title":"Raise exceptions instead of using assertions for control 
flow","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-27T18:26:52Z","updated_at":"2021-12-23T16:40:37Z","closed_at":"2021-12-23T16:40:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Motivated by https:\/\/github.com\/huggingface\/transformers\/issues\/12789 in Transformers, one welcoming change would be replacing assertions with proper exceptions. The only type of assertions we should keep are those used as sanity checks.\r\n\r\nCurrently, there is a total of 87 files with the `assert` statements (located under `datasets` and `src\/datasets`), so when working on this, to manage the PR size, only modify 4-5 files at most before submitting a PR.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3171\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3170","id":1037601926,"node_id":"PR_kwDODunzps4twDUo","number":3170,"title":"Preserve ordering in 
`zip_dict`","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-27T16:07:30Z","updated_at":"2021-10-29T13:09:37Z","closed_at":"2021-10-29T13:09:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3170","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3170","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3170.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3170.patch","merged_at":"2021-10-29T13:09:37Z"},"body":"Replace `set` with the `unique_values` generator in `zip_dict`.\r\n\r\nThis PR fixes the problem with the different ordering of the example keys across different Python sessions caused by the `zip_dict` call in `Features.decode_example`. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3170\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3169","id":1036773357,"node_id":"PR_kwDODunzps4ttYmZ","number":3169,"title":"Configurable max filename length in file locks","user":{"login":"lmmx","id":2979452,"node_id":"MDQ6VXNlcjI5Nzk0NTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2979452?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lmmx","html_url":"https:\/\/github.com\/lmmx","followers_url":"https:\/\/api.github.com\/users\/lmmx\/followers","following_url":"https:\/\/api.github.com\/users\/lmmx\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lmmx\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lmmx\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lmmx\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lmmx\/orgs","repos_url":"https:\/\/api.github.com\/users\/lmmx\/repos","events_url":"https:\/\/api.github.com\/users\/lmmx\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lmmx\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-26T21:52:55Z","updated_at":"2021-10-28T16:14:14Z","closed_at":"2021-10-28T16:14:13Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3169","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3169","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3169.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3169.patch","merged_at":null},"body":"Resolve #2924 (https:\/\/github.com\/huggingface\/datasets\/issues\/2924#issuecomment-952330956) wherein the assumption of file lock maximum filename length to be 255 raises an OSError on encrypted drives (ecryptFS on Linux uses part of the lower filename, reducing the maximum filename size to 143). Allowing this limit to be set in the config module allows this to be modified by users. Will not affect Windows users, as their class passes 255 on init explicitly.\r\n\r\nReproduced with the following example ([the first few lines of a script from Lightning Flash](https:\/\/lightning-flash.readthedocs.io\/en\/latest\/reference\/speech_recognition.html), fine-tuning a HF model):\r\n\r\n```py\r\nimport torch\r\n\r\nimport flash\r\nfrom flash.audio import SpeechRecognition, SpeechRecognitionData\r\nfrom flash.core.data.utils import download_data\r\n\r\n# 1. 
Create the DataModule\r\ndownload_data(\"https:\/\/pl-flash-data.s3.amazonaws.com\/timit_data.zip\", \".\/data\")\r\n\r\ndatamodule = SpeechRecognitionData.from_json(\r\n input_fields=\"file\",\r\n target_fields=\"text\",\r\n train_file=\"data\/timit\/train.json\",\r\n test_file=\"data\/timit\/test.json\",\r\n)\r\n```\r\n\r\nWhich gave this traceback:\r\n\r\n```py\r\nTraceback (most recent call last):\r\n File \"lf_ft.py\", line 10, in \r\n datamodule = SpeechRecognitionData.from_json(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_module.py\", line 1005, in from_json\r\n return cls.from_data_source(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_module.py\", line 571, in from_data_source\r\n train_dataset, val_dataset, test_dataset, predict_dataset = data_source.to_datasets(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_source.py\", line 307, in to_datasets\r\n train_dataset = self.generate_dataset(train_data, RunningStage.TRAINING)\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_source.py\", line 344, in generate_dataset\r\n data = load_data(data, mock_dataset)\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/audio\/speech_recognition\/data.py\", line 103, in load_data\r\n dataset_dict = load_dataset(self.filetype, data_files={stage: str(file)})\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1599, in load_dataset\r\n builder_instance = load_dataset_builder(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1457, in load_dataset_builder\r\n builder_instance: DatasetBuilder = builder_cls(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 285, in __init__\r\n with FileLock(lock_path):\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/utils\/filelock.py\", line 323, in __enter__\r\n self.acquire()\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/utils\/filelock.py\", line 272, in acquire\r\n self._acquire()\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/datasets\/utils\/filelock.py\", line 403, in _acquire\r\n fd = os.open(self._lock_file, open_mode)\r\nOSError: [Errno 36] File name too long: '\/home\/louis\/.cache\/huggingface\/datasets\/_home_louis_.cache_huggingface_datasets_json_default-98e6813a547f72fa_0.0.0_c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426.lock'\r\n```\r\n\r\nNote the filename is 145 chars long:\r\n\r\n```\r\n>>> len(\"_home_louis_.cache_huggingface_datasets_json_default-98e6813a547f72fa_0.0.0_c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426.lock\")\r\n145\r\n```\r\n\r\nAfter installing datasets as an editable local package and modifying the script I was running to first include:\r\n\r\n```py\r\nimport datasets\r\ndatasets.config.MAX_DATASET_CONFIG_ID_READABLE_LENGTH = 143\r\n```\r\n\r\nThe error goes away.\r\n\r\nIf I instead deliberately set the value incorrectly as 144, the OSError returns:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"lf_ft.py\", line 14, in \r\n datamodule = SpeechRecognitionData.from_json(\r\n File 
\"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_module.py\", line 1005, in from_json\r\n return cls.from_data_source(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_module.py\", line 571, in from_data_source\r\n train_dataset, val_dataset, test_dataset, predict_dataset = data_source.to_datasets(\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_source.py\", line 307, in to_datasets\r\n train_dataset = self.generate_dataset(train_data, RunningStage.TRAINING)\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/core\/data\/data_source.py\", line 344, in generate_dataset\r\n data = load_data(data, mock_dataset)\r\n File \"\/home\/louis\/miniconda3\/envs\/w2vlf\/lib\/python3.8\/site-packages\/flash\/audio\/speech_recognition\/data.py\", line 103, in load_data\r\n dataset_dict = load_dataset(self.filetype, data_files={stage: str(file)})\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/load.py\", line 1605, in load_dataset\r\n builder_instance = load_dataset_builder(\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/load.py\", line 1463, in load_dataset_builder\r\n builder_instance: DatasetBuilder = builder_cls(\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/builder.py\", line 285, in __init__\r\n with FileLock(lock_path):\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/utils\/filelock.py\", line 326, in __enter__\r\n self.acquire()\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/utils\/filelock.py\", line 275, in acquire\r\n self._acquire()\r\n File \"\/home\/louis\/dev\/hf_datasets\/src\/datasets\/utils\/filelock.py\", line 406, in _acquire\r\n fd = os.open(self._lock_file, open_mode)\r\nOSError: [Errno 36] File name too long: '\/home\/louis\/.cache\/huggingface\/datasets\/_home_louis_.cache_huggingface_datasets_json_default-32c812b5c1272d64_0.0.0_c2d554c3377ea79c7664b93dc65d0803b45e3279...-5794079643713042223.lock'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3169\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3168","id":1036673263,"node_id":"I_kwDODunzps49ymDv","number":3168,"title":"OpenSLR\/83 is 
empty","user":{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false},"assignees":[{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-10-26T19:42:21Z","updated_at":"2021-10-29T10:04:09Z","closed_at":"2021-10-29T10:04:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAs the summary says, openslr \/ SLR83 \/ train is empty.\r\n\r\nThe dataset returned after 
loading indicates there are **zero** rows. The correct number should be **17877**.\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\n\r\n\r\ndatasets.load_dataset('openslr', 'SLR83')\r\n```\r\n\r\n## Expected results\r\n```\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['path', 'audio', 'sentence'],\r\n num_rows: 17877\r\n })\r\n})\r\n```\r\n## Actual results\r\n```\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['path', 'audio', 'sentence'],\r\n num_rows: 0\r\n })\r\n})\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.1.dev0 (master HEAD)\r\n- Platform: Ubuntu 20.04\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3168\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3167","id":1036488992,"node_id":"I_kwDODunzps49x5Eg","number":3167,"title":"bookcorpusopen no longer works","user":{"login":"lucadiliello","id":23355969,"node_id":"MDQ6VXNlcjIzMzU1OTY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23355969?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucadiliello","html_url":"https:\/\/github.com\/lucadiliello","followers_url":"https:\/\/api.github.com\/users\/lucadiliello\/followers","following_url":"https:\/\/api.github.com\/users\/lucadiliello\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucadiliello\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucadiliello\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucadiliello\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucadiliello\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucadiliello\/repos","events_url":"https:\/\/api.github.com\/users\/lucadiliello\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucadiliello\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-10-26T16:06:15Z","updated_at":"2021-11-17T15:53:46Z","closed_at":"2021-11-17T15:53:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nWhen using the latest version of datasets (1.14.0), I cannot use the `bookcorpusopen` dataset. The process blocks always around `9924 examples [00:06, 1439.61 examples\/s]` when preparing the dataset. 
I also noticed that after half an hour the process is automatically killed because of the RAM usage (the machine has 1TB of RAM...).\r\n\r\nThis did not happen with 1.4.1.\r\nI tried also `rm -rf ~\/.cache\/huggingface` but did not help.\r\nChanging python version between 3.7, 3.8 and 3.9 did not help too.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\nd = datasets.load_dataset('bookcorpusopen')\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.0\r\n- Platform: Linux-5.4.0-1054-aws-x86_64-with-glibc2.27\r\n- Python version: 3.9.7\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3167\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3166","id":1036450283,"node_id":"PR_kwDODunzps4tsVQJ","number":3166,"title":"Deprecate prepare_module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-26T15:28:24Z","updated_at":"2021-11-05T09:27:37Z","closed_at":"2021-11-05T09:27:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3166","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3166","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3166.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3166.patch","merged_at":"2021-11-05T09:27:36Z"},"body":"In version 1.13, `prepare_module` was deprecated.\r\n\r\nThis PR adds a deprecation warning and removes it from all the library, using `dataset_module_factory` 
or `metric_module_factory` instead.\r\n\r\nFix #3165.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3166\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3165","id":1036448998,"node_id":"I_kwDODunzps49xvTm","number":3165,"title":"Deprecate prepare_module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/u
sers\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-26T15:27:15Z","updated_at":"2021-11-05T09:27:36Z","closed_at":"2021-11-05T09:27:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In version 1.13, `prepare_module` was deprecated.\r\n\r\nAdd deprecation warning and remove its usage from all the library.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3165\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3164","id":1035662830,"node_id":"I_kwDODunzps49uvXu","number":3164,"title":"Add raw data files to the Hub with GitHub LFS for canonical 
dataset","user":{"login":"zlucia","id":40370937,"node_id":"MDQ6VXNlcjQwMzcwOTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40370937?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zlucia","html_url":"https:\/\/github.com\/zlucia","followers_url":"https:\/\/api.github.com\/users\/zlucia\/followers","following_url":"https:\/\/api.github.com\/users\/zlucia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zlucia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zlucia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zlucia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zlucia\/orgs","repos_url":"https:\/\/api.github.com\/users\/zlucia\/repos","events_url":"https:\/\/api.github.com\/users\/zlucia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zlucia\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-10-25T23:28:21Z","updated_at":"2021-10-30T19:54:51Z","closed_at":"2021-10-30T19:54:51Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm interested in sharing the CaseHOLD dataset (https:\/\/arxiv.org\/abs\/2104.08671) as a canonical dataset on the HuggingFace Hub and would like to add the raw data files to the Hub with 
GitHub LFS, since it seems like a more sustainable long term storage solution, compared to other storage solutions available to my team. From what I can tell, this option is not immediately supported if one follows the sharing steps detailed here: [https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html#sharing-a-canonical-dataset](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html#sharing-a-canonical-dataset), since GitHub LFS is not supported for public forks. Is there a way to request this? Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3164\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3163","id":1035475061,"node_id":"PR_kwDODunzps4tpI44","number":3163,"title":"Add Image feature","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":14,"created_at":"2021-10-25T19:07:48Z","updated_at":"2021-12-30T06:37:21Z","closed_at":"2021-12-06T17:49:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3163","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3163","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3163.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3163.patch","merged_at":"2021-12-06T17:49:02Z"},"body":"Adds the Image feature. This feature is heavily inspired by the recently added Audio feature (#2324). Currently, this PR is pretty simple.\r\n\r\nSome considerations that need further discussion:\r\n* I've decided to use `Pillow`\/`PIL` as the image decoding library. Another candidate I considered is `torchvision`, mostly because of its `accimage` backend, which should be faster for loading `jpeg` images than `Pillow`. 
However, `torchvision`'s io module only supports png and jpeg images, has `torch` as a hard dependency, and requires magic to work with image bytes ( `torch.ByteTensor(torch.ByteStorage.from_buffer(image_bytes)))`).\r\n* Currently, I'm converting `PIL`'s `Image` type to `np.ndarray`. The vision models in Transformers such as ViT prefer the raw `Image` type and not the decoded tensors, so there is a small overhead due to [this conversion](https:\/\/github.com\/huggingface\/transformers\/blob\/3e8761ab8077e3bb243fe2f78b2a682bd2257cf1\/src\/transformers\/image_utils.py#L62-L73). IMO this is justified to keep this part aligned with the Audio feature, which also returns `np.ndarray`. What do you think?\r\n* Still have to work on the channel decoding logic:\r\n * PyTorch prefers the channel-first ordering (C, H, W); TF and Flax the channel-last ordering (H, W, C). One cool feature would be adjusting the channel order based on the selected formatter (`torch`, `tf`, `jax`). \r\n * By default, `Image.open` returns images of shape (H, W, C). However, ViT's feature extractor expects the format (C, H, W) if the image is passed as an array (explained [here](https:\/\/huggingface.co\/transformers\/model_doc\/vit.html#transformers.ViTFeatureExtractor.__call__)), so I'm more inclined to the format (C, H, W). Which one do you prefer, (C, H, W) or (H, W, C)?\r\n* Are there any options you'd like to see? (the user could change those via `cast_column`, such as `sampling_rate` in the Audio feature)\r\n\r\n\r\nTODOs:\r\n* [x] tests\r\n* in subsequent PRs:\r\n * docs - a section in the docs, which gives some additional info on the Image and Audio feature and compares them to \r\n `ArrayND` \r\n * streaming (waiting for #3129 and #3133 to get merged first)\r\n * update the image tasks and the datasets to use the new feature\r\n * Image\/Audio formatting\r\n\r\n[Colab Notebook](https:\/\/colab.research.google.com\/drive\/1mIrTnqTVkWLJWoBzT1ABSe-LFelIep1c?usp=sharing) where you can play with this feature.\r\n\r\nI'm also adding a link to the [Image](https:\/\/github.com\/tensorflow\/datasets\/blob\/7ac7d506488d46038a5854961d068926b3f93c7f\/tensorflow_datasets\/core\/features\/image_feature.py#L155) feature in TFDS because one of our goals is to parse TFDS scripts eventually, so our Image feature has to (at least) support all the formats theirs does.\r\nFeel free to cc anyone who might be interested.\r\n\r\nP.S. 
Please ignore the changes in the `datasets\/**\/*.py` files \ud83d\ude04.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163\/reactions","total_count":8,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":7,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3163\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3162","id":1035462136,"node_id":"I_kwDODunzps49t-X4","number":3162,"title":"`datasets-cli test` should work with datasets without scripts","user":{"login":"sashavor","id":14205986,"node_id":"MDQ6VXNlcjE0MjA1OTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14205986?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sashavor","html_url":"https:\/\/github.com\/sashavor","followers_url":"https:\/\/api.github.com\/users\/sashavor\/followers","following_url":"https:\/\/api.github.com\/users\/sashavor\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sashavor\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sashavor\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sashavor\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sashavor\/orgs","repos_url":"https:\/\/api.github.com\/users\/sashavor\/repos","events_url":"https:\/\/api.github.com\/users\/sashavor\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sashavor\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-10-25T18:52:30Z","updated_at":"2021-11-25T16:04:29Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It would be really useful to be able to run `datasets-cli test`for datasets that don't have scripts attached to them (whether the datasets are private or not).\r\n\r\nI wasn't able to run the script for a private test dataset that I had created on the hub (https:\/\/huggingface.co\/datasets\/huggingface\/DataMeasurementsTest\/tree\/main) -- although @lhoestq came to save the day!\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3162\/timeline","performed_via_github_app":null} 
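On the channel-ordering question raised in the Image feature PR (#3163) above, the two layouts differ only by a transpose; a minimal Pillow/NumPy sketch follows, illustrative rather than the feature's actual decoding code.

```python
# The (H, W, C) vs (C, H, W) question comes down to a transpose.
import numpy as np
from PIL import Image

img = Image.new("RGB", (640, 480))    # Pillow sizes are (width, height)
hwc = np.asarray(img)                 # decoded array is (H, W, C) = (480, 640, 3)
chw = hwc.transpose(2, 0, 1)          # channel-first layout, (C, H, W) = (3, 480, 640)

assert hwc.shape == (480, 640, 3)
assert chw.shape == (3, 480, 640)
```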
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3161","id":1035444292,"node_id":"PR_kwDODunzps4tpCsm","number":3161,"title":"Add riddle_sense dataset","user":{"login":"ziyiwu9494","id":44691149,"node_id":"MDQ6VXNlcjQ0NjkxMTQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44691149?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ziyiwu9494","html_url":"https:\/\/github.com\/ziyiwu9494","followers_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/followers","following_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/orgs","repos_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/repos","events_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ziyiwu9494\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-25T18:30:56Z","updated_at":"2021-11-04T14:01:15Z","closed_at":"2021-11-04T14:01:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3161","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3161","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3161.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3161.patch","merged_at":"2021-11-04T14:01:14Z"},"body":"Adding a new dataset for QA with riddles. 
I'm confused about the tagging process because it looks like the streamlit app loads data from the current repo, so is it something that should be done after merging or off my fork?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3161\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3160","id":1035274640,"node_id":"PR_kwDODunzps4tofO0","number":3160,"title":"Better error msg if `len(predictions)` doesn't match `len(references)` in metrics","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-25T15:25:05Z","updated_at":"2021-11-05T11:44:59Z","closed_at":"2021-11-05T09:31:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3160","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3160","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3160.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3160.patch","merged_at":"2021-11-05T09:31:02Z"},"body":"Improve the error message in `Metric.add_batch` if `len(predictions)` doesn't match `len(references)`.\r\n\r\ncc: @BramVanroy (feel free to test this code on your examples and review this PR)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3160\/timeline","performed_via_github_app":null} 
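A rough illustration of the check whose message #3160 above improves; this is plain Python and deliberately not the actual `Metric.add_batch` code, just the shape of the length comparison being discussed.

```python
predictions = ["1976", "1977", "1978"]
references = ["1976", "1977"]  # one reference short

# Fail early with explicit counts instead of a generic format error.
if len(predictions) != len(references):
    raise ValueError(
        f"Mismatch in the number of predictions ({len(predictions)}) "
        f"and references ({len(references)})."
    )
```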
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3159","id":1035174560,"node_id":"PR_kwDODunzps4toKD5","number":3159,"title":"Make inspect.get_dataset_config_names always return a non-empty list","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-25T13:59:43Z","updated_at":"2021-10-29T13:14:37Z","closed_at":"2021-10-28T05:44:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3159","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3159","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3159.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3159.patch","merged_at":"2021-10-28T05:44:49Z"},"body":"Make all named configs cases, so that no special unnamed config case needs to be handled differently.\r\n\r\nFix #3135.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3159\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3158","id":1035158070,"node_id":"PR_kwDODunzps4toGpe","number":3158,"title":"Fix string encoding for Value 
type","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-25T13:44:13Z","updated_at":"2021-10-25T14:12:06Z","closed_at":"2021-10-25T14:12:05Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3158","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3158","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3158.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3158.patch","merged_at":"2021-10-25T14:12:05Z"},"body":"Some metrics have `string` features but currently it fails if users pass integers instead. Indeed feature encoding that handles the conversion of the user's objects to the right python type is missing a case for `string`, while it already works as expected for integers, floats and booleans\r\n\r\nHere is an example code that didn't work previously, but that works with this fix:\r\n```python\r\nimport datasets\r\n\r\n# Note that 'id' is an integer while the SQuAD metric uses strings\r\npredictions = [{'prediction_text': '1976', 'id': 5}]\r\nreferences = [{'answers': {'answer_start': [97], 'text': ['1976']}, 'id': 5}] \r\n\r\nsquad_metric = datasets.load_metric(\"squad\") \r\nsquad_metric.add_batch(predictions=predictions, references=references) \r\nresults = squad_metric.compute()\r\n# {'exact_match': 100.0, 'f1': 100.0}\r\n```\r\n\r\ncc @sgugger @philschmid ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3158\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3157","id":1034775165,"node_id":"PR_kwDODunzps4tm3_I","number":3157,"title":"Fixed: duplicate parameter and missing parameter in 
docstring","user":{"login":"PanQiWei","id":46810637,"node_id":"MDQ6VXNlcjQ2ODEwNjM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46810637?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PanQiWei","html_url":"https:\/\/github.com\/PanQiWei","followers_url":"https:\/\/api.github.com\/users\/PanQiWei\/followers","following_url":"https:\/\/api.github.com\/users\/PanQiWei\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PanQiWei\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PanQiWei\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PanQiWei\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PanQiWei\/orgs","repos_url":"https:\/\/api.github.com\/users\/PanQiWei\/repos","events_url":"https:\/\/api.github.com\/users\/PanQiWei\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PanQiWei\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-25T07:26:00Z","updated_at":"2021-10-25T14:02:19Z","closed_at":"2021-10-25T14:02:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3157","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3157","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3157.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3157.patch","merged_at":"2021-10-25T14:02:18Z"},"body":"changing duplicate parameter `data_files` in `DatasetBuilder.__init__` to the missing parameter `data_dir`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3157\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3156","id":1034478844,"node_id":"I_kwDODunzps49qOT8","number":3156,"title":"Rouge and Meteor for multiple 
references","user":{"login":"avinashsai","id":22453634,"node_id":"MDQ6VXNlcjIyNDUzNjM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22453634?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/avinashsai","html_url":"https:\/\/github.com\/avinashsai","followers_url":"https:\/\/api.github.com\/users\/avinashsai\/followers","following_url":"https:\/\/api.github.com\/users\/avinashsai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/avinashsai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/avinashsai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/avinashsai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/avinashsai\/orgs","repos_url":"https:\/\/api.github.com\/users\/avinashsai\/repos","events_url":"https:\/\/api.github.com\/users\/avinashsai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/avinashsai\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-10-24T18:08:51Z","updated_at":"2021-12-12T06:16:26Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nCurrently rogue and meteor supports only 
single references. Can we use these metrics to calculate for multiple references?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3156\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3155","id":1034468757,"node_id":"I_kwDODunzps49qL2V","number":3155,"title":"Illegal instruction (core dumped) at datasets import","user":{"login":"hacobe","id":91226467,"node_id":"MDQ6VXNlcjkxMjI2NDY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/91226467?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hacobe","html_url":"https:\/\/github.com\/hacobe","followers_url":"https:\/\/api.github.com\/users\/hacobe\/followers","following_url":"https:\/\/api.github.com\/users\/hacobe\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hacobe\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hacobe\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hacobe\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hacobe\/orgs","repos_url":"https:\/\/api.github.com\/users\/hacobe\/repos","events_url":"https:\/\/api.github.com\/users\/hacobe\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hacobe\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-24T17:21:36Z","updated_at":"2021-11-18T19:07:04Z","closed_at":"2021-11-18T19:07:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI install datasets using conda and when I import datasets I get: \"Illegal instruction (core dumped)\"\r\n\r\n## Steps to reproduce the bug\r\n\r\n```\r\nconda create --prefix path\/to\/env\r\nconda activate path\/to\/env\r\nconda install -c huggingface -c conda-forge datasets\r\n# exits with output \"Illegal instruction (core dumped)\"\r\npython -m datasets\r\n```\r\n\r\n## Environment info\r\n\r\nWhen I run \"datasets-cli env\", I also get \"Illegal instruction (core dumped)\"\r\n\r\nIf I run the following commands:\r\n\r\n```\r\nconda create --prefix path\/to\/another\/new\/env\r\nconda activate path\/to\/another\/new\/env\r\nconda install -c huggingface transformers\r\ntransformers-cli env\r\n```\r\n\r\nThen I get:\r\n\r\n- `transformers` version: 4.11.3\r\n- Platform: Linux-5.4.0-67-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.12\r\n- PyTorch version (GPU?): not installed (NA)\r\n- Tensorflow version (GPU?): not installed (NA)\r\n- Flax version (CPU?\/GPU?\/TPU?): not installed 
(NA)\r\n- Jax version: not installed\r\n- JaxLib version: not installed\r\n- Using GPU in script?: No\r\n- Using distributed or parallel set-up in script?: No\r\n\r\nLet me know what additional information you need in order to debug this issue. Thanks in advance!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3155\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3154","id":1034361806,"node_id":"I_kwDODunzps49pxvO","number":3154,"title":"Sacrebleu unexpected behaviour\/requirement for data format","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-24T08:55:33Z","updated_at":"2021-10-31T09:08:32Z","closed_at":"2021-10-31T09:08:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen comparing with the original `sacrebleu` implementation, the `datasets` implementation does some strange things that I do not quite understand. 
This issue was triggered when I was trying to implement TER and found the datasets implementation of BLEU [here](https:\/\/github.com\/huggingface\/datasets\/pull\/3153).\r\n\r\nIn the below snippet, the original sacrebleu snippet works just fine whereas the datasets implementation throws an error.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport sacrebleu\r\nimport datasets\r\n\r\nrefs = [\r\n ['The dog bit the man.', 'It was not unexpected.', 'The man bit him first.'],\r\n ['The dog had bit the man.', 'No one was surprised.', 'The man had bitten the dog.'],\r\n]\r\n\r\nhyps = ['The dog bit the man.', \"It wasn't surprising.\", 'The man had just bitten him.']\r\n\r\nexpected_bleu = 48.530827\r\n\r\nds_bleu = datasets.load_metric(\"sacrebleu\")\r\n\r\nbleu_score_sb = sacrebleu.corpus_bleu(hyps, refs).score\r\nprint(bleu_score_sb, expected_bleu)\r\n# works: 48.5308...\r\nbleu_score_ds = ds_bleu.compute(predictions=hyps, references=refs)[\"score\"]\r\nprint(bleu_score_ds, expected_bleu)\r\n# ValueError: Predictions and\/or references don't match the expected format.\r\n```\r\nThis seems to be related to how datasets forces the features format here:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/87c71b9c29a40958973004910f97e4892559dfed\/metrics\/sacrebleu\/sacrebleu.py#L94-L99\r\n\r\nand then manipulates the references during the compute stage here\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/87c71b9c29a40958973004910f97e4892559dfed\/metrics\/sacrebleu\/sacrebleu.py#L119-L122\r\n\r\nI do not quite understand why that is required since sacrebleu handles argument parsing quite well [by itself](https:\/\/github.com\/mjpost\/sacrebleu\/blob\/2787185dd0f8d224c72ee5a831d163c2ac711a47\/sacrebleu\/metrics\/base.py#L229). \r\n## Actual results\r\nTraceback (most recent call last):\r\n File \"C:\\Users\\bramv\\AppData\\Roaming\\JetBrains\\PyCharm2020.3\\scratches\\scratch_23.py\", line 23, in \r\n bleu_score_ds = ds_bleu.compute(predictions=hyps, references=refs)[\"score\"]\r\n File \"C:\\dev\\python\\datasets\\src\\datasets\\metric.py\", line 392, in compute\r\n self.add_batch(predictions=predictions, references=references)\r\n File \"C:\\dev\\python\\datasets\\src\\datasets\\metric.py\", line 439, in add_batch\r\n raise ValueError(\r\nValueError: Predictions and\/or references don't match the expected format.\r\nExpected format: {'predictions': Value(dtype='string', id='sequence'), 'references': Sequence(feature=Value(dtype='string', id='sequence'), length=-1, id='references')},\r\nInput predictions: ['The dog bit the man.', \"It wasn't surprising.\", 'The man had just bitten him.'],\r\nInput references: [['The dog bit the man.', 'It was not unexpected.', 'The man bit him first.'], ['The dog had bit the man.', 'No one was surprised.', 'The man had bitten the dog.']]\r\n\r\n## Environment info\r\n- `datasets` version: 1.14.1.dev0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.9.2\r\n- PyArrow version: 4.0.1\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3154\/timeline","performed_via_github_app":null} 
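To make the format mismatch in #3154 above concrete, here is a minimal sketch using the same sentences as the snippet in that issue: the `datasets` metric takes one list of references per prediction, while `sacrebleu.corpus_bleu` takes one list per reference stream, i.e. the transpose of that structure (assuming every prediction has the same number of references).

```python
import sacrebleu

hyps = ["The dog bit the man.", "It wasn't surprising.", "The man had just bitten him."]

# One list of references *per prediction* (the layout the datasets metric expects).
refs_per_prediction = [
    ["The dog bit the man.", "The dog had bit the man."],
    ["It was not unexpected.", "No one was surprised."],
    ["The man bit him first.", "The man had bitten the dog."],
]

# One list *per reference stream* (the layout sacrebleu.corpus_bleu expects):
# simply the transpose of the structure above.
refs_per_stream = [list(stream) for stream in zip(*refs_per_prediction)]

print(sacrebleu.corpus_bleu(hyps, refs_per_stream).score)  # ~48.53, as in the issue
```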
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3153","id":1034179198,"node_id":"PR_kwDODunzps4tlEVE","number":3153,"title":"Add TER (as implemented in sacrebleu)","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-23T14:26:45Z","updated_at":"2021-11-02T11:04:11Z","closed_at":"2021-11-02T11:04:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3153","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3153","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3153.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3153.patch","merged_at":"2021-11-02T11:04:11Z"},"body":"Implements TER (Translation Edit Rate) as per its implementation in sacrebleu. Sacrebleu for BLEU scores is already implemented in `datasets` so I thought this would be a nice addition.\r\n\r\nI started from the sacrebleu implementation, as the two metrics have a lot in common.\r\n\r\nVerified with sacrebleu's [testing suite](https:\/\/github.com\/mjpost\/sacrebleu\/blob\/078c440168c6adc89ba75fe6d63f0d922d42bcfe\/test\/test_ter.py) that this indeed works as intended.\r\n\r\n```python\r\nimport datasets\r\n\r\n\r\ntest_cases = [\r\n (['aaaa bbbb cccc dddd'], ['aaaa bbbb cccc dddd'], 0), # perfect match\r\n (['dddd eeee ffff'], ['aaaa bbbb cccc'], 1), # no overlap\r\n ([''], ['a'], 1), # corner case, empty hypothesis\r\n (['d e f g h a b c'], ['a b c d e f g h'], 1 \/ 8), # a single shift fixes MT\r\n (\r\n [\r\n 'w\u00e4hlen Sie \" Bild neu berechnen , \" um beim \u00c4ndern der Bildgr\u00f6\u00dfe Pixel hinzuzuf\u00fcgen oder zu entfernen , damit das Bild ungef\u00e4hr dieselbe Gr\u00f6\u00dfe aufweist wie die andere Gr\u00f6\u00dfe .',\r\n 'wenn Sie alle Aufgaben im aktuellen Dokument aktualisieren m\u00f6chten , w\u00e4hlen Sie im Men\u00fc des Aufgabenbedienfelds die Option \" Alle Aufgaben aktualisieren . 
\"',\r\n 'klicken Sie auf der Registerkarte \" Optionen \" auf die Schaltfl\u00e4che \" Benutzerdefiniert \" und geben Sie Werte f\u00fcr \" Fehlerkorrektur-Level \" und \" Y \/ X-Verh\u00e4ltnis \" ein .',\r\n 'Sie k\u00f6nnen beispielsweise ein Dokument erstellen , das ein Auto \u00fcber die B\u00fchne enth\u00e4lt .',\r\n 'w\u00e4hlen Sie im Dialogfeld \" Neu aus Vorlage \" eine Vorlage aus und klicken Sie auf \" Neu . \"',\r\n ],\r\n [\r\n 'w\u00e4hlen Sie \" Bild neu berechnen , \" um beim \u00c4ndern der Bildgr\u00f6\u00dfe Pixel hinzuzuf\u00fcgen oder zu entfernen , damit die Darstellung des Bildes in einer anderen Gr\u00f6\u00dfe beibehalten wird .',\r\n 'wenn Sie alle Aufgaben im aktuellen Dokument aktualisieren m\u00f6chten , w\u00e4hlen Sie im Men\u00fc des Aufgabenbedienfelds die Option \" Alle Aufgaben aktualisieren . \"',\r\n 'klicken Sie auf der Registerkarte \" Optionen \" auf die Schaltfl\u00e4che \" Benutzerdefiniert \" und geben Sie f\u00fcr \" Fehlerkorrektur-Level \" und \" Y \/ X-Verh\u00e4ltnis \" niedrigere Werte ein .',\r\n 'Sie k\u00f6nnen beispielsweise ein Dokument erstellen , das ein Auto enthalt , das sich \u00fcber die B\u00fchne bewegt .',\r\n 'w\u00e4hlen Sie im Dialogfeld \" Neu aus Vorlage \" eine Vorlage aus und klicken Sie auf \" Neu . \"',\r\n ],\r\n 0.136 # realistic example from WMT dev data (2019)\r\n ),\r\n]\r\n\r\nter = datasets.load_metric(r\"path\\to\\datasets\\metrics\\ter\")\r\n\r\npredictions = [\"hello there general kenobi\", \"foo bar foobar\"]\r\nreferences = [[\"hello there general kenobi\", \"hello there !\"], [\"foo bar foobar\", \"foo bar foobar\"]]\r\nprint(ter.compute(predictions=predictions, references=references))\r\n\r\nfor hyp, ref, score in test_cases:\r\n # Note the reference transformation which is different from scarebleu's input format\r\n results = ter.compute(predictions=hyp, references=[[r] for r in ref])\r\n assert 100*score == results[\"score\"], f\"expected {100*score}, got {results['score']}\"\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3153\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3152","id":1034039379,"node_id":"PR_kwDODunzps4tkqi-","number":3152,"title":"Fix some typos in the 
documentation","user":{"login":"h4iku","id":3812788,"node_id":"MDQ6VXNlcjM4MTI3ODg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3812788?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/h4iku","html_url":"https:\/\/github.com\/h4iku","followers_url":"https:\/\/api.github.com\/users\/h4iku\/followers","following_url":"https:\/\/api.github.com\/users\/h4iku\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/h4iku\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/h4iku\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/h4iku\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/h4iku\/orgs","repos_url":"https:\/\/api.github.com\/users\/h4iku\/repos","events_url":"https:\/\/api.github.com\/users\/h4iku\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/h4iku\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-23T01:38:35Z","updated_at":"2021-10-25T14:27:36Z","closed_at":"2021-10-25T14:03:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3152","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3152","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3152.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3152.patch","merged_at":"2021-10-25T14:03:48Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3152\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3151","id":1033890501,"node_id":"PR_kwDODunzps4tkL7t","number":3151,"title":"Re-add faiss to windows testing 
suite","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-22T19:34:29Z","updated_at":"2021-11-02T10:47:34Z","closed_at":"2021-11-02T10:06:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3151","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3151","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3151.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3151.patch","merged_at":"2021-11-02T10:06:03Z"},"body":"In recent versions, `faiss-cpu` seems to be available for Windows as well. See the [PyPi page](https:\/\/pypi.org\/project\/faiss-cpu\/#files) to confirm. We can therefore included it for Windows in the setup file.\r\n\r\nAt first tests didn't pass due to problems with permissions as caused by `NamedTemporaryFile` on Windows. This built-in library is notoriously poor in playing nice on Windows. The required change isn't pretty, but it works. First set `delete=False` to not automatically try to delete the file on `exit`. Then, manually delete the file with `unlink`. 
It's weird, I know, but it works.\r\n\r\n```python\r\nwith tempfile.NamedTemporaryFile(delete=False) as tmp_file:\r\n # do stuff\r\nos.unlink(tmp_file.name)\r\n```\r\n\r\ncloses #3150 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3151\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3150","id":1033831530,"node_id":"I_kwDODunzps49nwRq","number":3150,"title":"Faiss _is_ available on Windows","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-22T18:07:16Z","updated_at":"2021-11-02T10:06:03Z","closed_at":"2021-11-02T10:06:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In the setup file, I find the following:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/87c71b9c29a40958973004910f97e4892559dfed\/setup.py#L171\r\n\r\nHowever, FAISS does install perfectly fine on Windows on my system. You can also confirm this on the [PyPi page](https:\/\/pypi.org\/project\/faiss-cpu\/#files), where Windows wheels are available. Maybe this was true for older versions? 
For current versions, this can be removed I think.\r\n\r\n(This isn't really a bug but didn't know how else to tag.)\r\n\r\nIf you agree I can do a quick PR and remove that line.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3150\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3149","id":1033747625,"node_id":"PR_kwDODunzps4tjuUt","number":3149,"title":"Add CMU Hinglish DoG Dataset for MT","user":{"login":"Ishan-Kumar2","id":46553104,"node_id":"MDQ6VXNlcjQ2NTUzMTA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46553104?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Ishan-Kumar2","html_url":"https:\/\/github.com\/Ishan-Kumar2","followers_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/followers","following_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/orgs","repos_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/repos","events_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-22T16:17:25Z","updated_at":"2021-11-15T11:36:42Z","closed_at":"2021-11-15T10:27:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3149","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3149","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3149.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3149.patch","merged_at":"2021-11-15T10:27:45Z"},"body":"Address part of #2841 \r\n\r\nAdded the CMU Hinglish DoG Dataset as in GLUECoS. Added it as a seperate dataset as unlike other tasks of GLUE CoS this can't be evaluated for a BERT like model. \r\nConsists of parallel dataset between Hinglish (Hindi-English) and English, can be used for Machine Translation between the two. 
\r\n\r\nThe data processing part is inspired from the GLUECoS repo [here](https:\/\/github.com\/microsoft\/GLUECoS\/blob\/7fdc51653e37a32aee17505c47b7d1da364fa77e\/Data\/Preprocess_Scripts\/preprocess_mt_en_hi.py)\r\nThe dummy data part is not working properly, it shows \r\n``` UnboundLocalError: local variable 'generator_splits' referenced before assignment ``` \r\nwhen I run without ``--auto_generate``.\r\n\r\nPlease let me know how I can fix that.\r\nThanks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3149\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3148","id":1033685208,"node_id":"I_kwDODunzps49nMjY","number":3148,"title":"Streaming with num_workers != 0","user":{"login":"justheuristic","id":3491902,"node_id":"MDQ6VXNlcjM0OTE5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3491902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justheuristic","html_url":"https:\/\/github.com\/justheuristic","followers_url":"https:\/\/api.github.com\/users\/justheuristic\/followers","following_url":"https:\/\/api.github.com\/users\/justheuristic\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justheuristic\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justheuristic\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justheuristic\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justheuristic\/orgs","repos_url":"https:\/\/api.github.com\/users\/justheuristic\/repos","events_url":"https:\/\/api.github.com\/users\/justheuristic\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justheuristic\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-10-22T15:07:17Z","updated_at":"2022-01-05T14:30:49Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen using dataset streaming with pytorch DataLoader, the setting num_workers to anything other than 0 causes the code to freeze forever before yielding the first batch.\r\n\r\nThe code owner is likely @lhoestq \r\n\r\n## Steps to reproduce the bug\r\n\r\nFor your convenience, we've prepped a colab notebook that reproduces the bug\r\nhttps:\/\/colab.research.google.com\/drive\/1Mgl0oTZSNIE3UeGl_oX9wPCOIxRg19h1?usp=sharing\r\n```python\r\n!pip install datasets==1.14.0\r\n\r\nshould_freeze_forever = True\r\n# ^-- set this to True in order to freeze forever, set to False in order to 
work normally\r\n\r\nimport torch\r\nfrom datasets import load_dataset\r\n\r\ndata = load_dataset(\"oscar\", \"unshuffled_deduplicated_bn\", split=\"train\", streaming=True)\r\ndata = data.map(lambda x: {\"text\": x[\"text\"], \"orig\": f\"oscar[{x['id']}]\"}, batched=True)\r\ndata = data.shuffle(100, seed=1337)\r\n\r\ndata = data.with_format(\"torch\")\r\nloader = torch.utils.data.DataLoader(data, batch_size=2, num_workers=2 if should_freeze_forever else 0)\r\n\r\n# v-- the code should freeze forever at this line\r\nfor i, row in enumerate(loader):\r\n print(row)\r\n if i > 10: break\r\nprint(\"DONE!\")\r\n```\r\n\r\n## Expected results\r\nThe code should not freeze forever with num_workers=2\r\n\r\n## Actual results\r\nThe code freezes forever with num_workers=2\r\n\r\n## Environment info\r\n- `datasets` version: 1.14.0 (also found in previous versions)\r\n- Platform: google colab (also locally)\r\n- Python version: 3.7, (also 3.8)\r\n- PyArrow version: 3.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3148\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3147","id":1033607659,"node_id":"PR_kwDODunzps4tjRHG","number":3147,"title":"Fix CLI test to ignore verfications when saving 
infos","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-22T13:52:46Z","updated_at":"2021-10-27T08:01:50Z","closed_at":"2021-10-27T08:01:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3147","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3147","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3147.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3147.patch","merged_at":"2021-10-27T08:01:49Z"},"body":"Fix #3146.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3147\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3146","id":1033605947,"node_id":"I_kwDODunzps49m5M7","number":3146,"title":"CLI test command throws NonMatchingSplitsSizesError when saving 
infos","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-22T13:50:53Z","updated_at":"2021-10-27T08:01:49Z","c
losed_at":"2021-10-27T08:01:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When trying to generate a datset JSON metadata, a `NonMatchingSplitsSizesError` is thrown:\r\n```\r\n$ datasets-cli test datasets\/arabic_billion_words --save_infos --all_configs\r\nTesting builder 'Alittihad' (1\/10)\r\nDownloading and preparing dataset arabic_billion_words\/Alittihad (download: 332.13 MiB, generated: Unknown size, post-processed: Unknown size, total: 332.13 MiB) to .cache\\arabic_billion_words\\Alittihad\\1.1.0\\8175ff1c9714c6d5d15b1141b6042e5edf048276bb81a9c14e35e149a7a62ae4...\r\nTraceback (most recent call last):\r\n File \"path\\huggingface\\datasets\\.venv\\Scripts\\datasets-cli-script.py\", line 33, in \r\n sys.exit(load_entry_point('datasets', 'console_scripts', 'datasets-cli')())\r\n File \"path\\huggingface\\datasets\\src\\datasets\\commands\\datasets_cli.py\", line 33, in main\r\n service.run()\r\n File \"path\\huggingface\\datasets\\src\\datasets\\commands\\test.py\", line 144, in run\r\n builder.download_and_prepare(\r\n File \"path\\huggingface\\datasets\\src\\datasets\\builder.py\", line 607, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"path\\huggingface\\datasets\\src\\datasets\\builder.py\", line 709, in _download_and_prepare\r\n verify_splits(self.info.splits, split_dict)\r\n File \"path\\huggingface\\datasets\\src\\datasets\\utils\\info_utils.py\", line 74, in verify_splits\r\n raise NonMatchingSplitsSizesError(str(bad_splits))\r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=0, num_examples=0, dataset_name='arabic_billion_words'), 'recorded': SplitInfo(name='train', num_bytes=1601790302, num_examples=349342, dataset_name='arabic_billion_words')}]\r\n```\r\n\r\nThis is due because a previous run generated a wrong `dataset_info.json`.\r\n\r\nThis error can be avoided by passing `--ignore_verifications`, but I think this should be assumed when passing `--save_infos`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3146\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3145","id":1033580009,"node_id":"I_kwDODunzps49my3p","number":3145,"title":"[when Image type will exist] provide a way to get the data as binary + 
filename","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-22T13:23:49Z","updated_at":"2021-12-22T11:05:37Z","closed_at":"2021-12-22T11:05:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\nWhen a dataset cell contains a value of type Image (be it from a remote URL, an Array2D\/3D, or any other way to represent images), I want to be able to write the image to the disk, with the correct filename, and optionally to know its mimetype, in order to serve it on the web.\r\n\r\nNote: this issue would apply exactly the same for the `Audio` type.\r\n\r\n**Describe the solution you'd like**\r\n\r\nIf a \"cell\" has the type `Image`, provide a way to get the binary content of the file, and the filename, eg as:\r\n\r\n```python\r\n filename: str\r\n data: bytes\r\n```\r\n\r\n**Describe alternatives you've considered**\r\n\r\nA way to write the cell to the disk (passing a local directory), and then return the pathname, filename, and mimetype.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3145\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3144","id":1033573760,"node_id":"I_kwDODunzps49mxWA","number":3144,"title":"Infer the features if missing","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-22T13:17:33Z","updated_at":"2021-10-22T13:17:33Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\nSome datasets, in particular community datasets, have no info file, thus no features.\r\n\r\n**Describe the solution you'd like**\r\n\r\nIf a dataset has no features, the first loaded data (5-10 rows) could be used to infer the type.\r\n\r\nRelated: `datasets` would provide a way to load the data, and get the rows AND the features as the result.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nThe HF hub could also provide some UI to help the dataset maintainers to explicit the types of their rows, or automatically infer them as an initial proposal.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3144\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3143","id":1033569655,"node_id":"I_kwDODunzps49mwV3","number":3143,"title":"Provide a way to check if the features (in info) match with the data of a split","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-22T13:13:36Z","updated_at":"2021-10-22T13:17:56Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\nI understand that currently the data loaded has not always the type described in the info features\r\n\r\n**Describe the solution you'd like**\r\n\r\nProvide a way to check if the rows have the type described by info features\r\n\r\n**Describe alternatives you've considered**\r\n\r\nAlways check it, and raise an error when loading the data if their type doesn't match the features.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3143\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3142","id":1033566034,"node_id":"I_kwDODunzps49mvdS","number":3142,"title":"Provide a way to write a streamed dataset to the disk","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-22T13:09:53Z","updated_at":"2021-10-29T11:14:39Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nThe streaming mode allows to get the 100 first rows of a dataset very quickly. 
But it does not cache the answer, so a posterior call to get the same 100 rows will send a request to the server again and again.\r\n\r\n**Describe the solution you'd like**\r\n\r\nProvide a way to write the streamed rows of a dataset on the disk, and to load from it later.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nProvide a third mode: `lazy`, which would use the local cache for the data that have already been fetched previously, and use streaming to get the rest of the requested data. \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3142\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3141","id":1033555910,"node_id":"PR_kwDODunzps4tjGYz","number":3141,"title":"Fix caching bugs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-22T12:59:25Z","updated_at":"2021-10-22T20:52:08Z","closed_at":"2021-10-22T13:47:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3141","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3141","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3141.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3141.patch","merged_at":"2021-10-22T13:47:04Z"},"body":"This PR fixes some caching bugs (most likely introduced in the latest refactor):\r\n* remove \")\" added by accident in the dataset dir name\r\n* correctly pass the namespace kwargs in `CachedDatasetModuleFactory`\r\n* improve the warning message if `HF_DATASETS_OFFLINE is 
`True`\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3141\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3140","id":1033524132,"node_id":"I_kwDODunzps49mlOk","number":3140,"title":"Add DER metric","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"assignees":[{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-22T12:22:11Z","updated_at":"2021-10-22T12:22:28Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Add DER metric for speaker diarization task.\r\n\r\nThis is used by SUPERB beenchmark, for example.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3140\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3139","id":1033524079,"node_id":"I_kwDODunzps49mlNv","number":3139,"title":"Fix file\/directory deletion on Windows 
","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-22T12:22:08Z","updated_at":"2021-10-22T12:22:08Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, on Windows, some attempts to delete a dataset 
file\/directory will fail with the `PerimissionError`. \r\n\r\nExamples:\r\n- download a dataset, then force redownload it in the same session while keeping a reference to the downloaded dataset\r\n```python\r\nfrom datasets import load_dataset\r\ndset = load_dataset(\"sst\", split=\"train\")\r\ndset = load_dataset(\"sst\", split=\"train\", download_mode=\"force_redownload\")\r\n```\r\n- try to clean up the cache files while keeping a reference to those files (via the mapped dataset):\r\n```python\r\nfrom datasets import load_dataset\r\ndset = load_dataset(\"sst\", split=\"train\")\r\ndset_mapped = dset.map(lambda _: {\"dummy_col\": 1})\r\ndset.cleanup_cache_files()\r\n```\r\nWe should fix those.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3139\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3138","id":1033379997,"node_id":"I_kwDODunzps49mCCd","number":3138,"title":"More fine-grained taxonomy of error types","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-22T09:35:29Z","updated_at":"2021-10-22T09:35:35Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\nExceptions like `FileNotFoundError` can be raised by different parts of the code, and it's hard to detect which one did\r\n\r\n**Describe the solution you'd like**\r\n\r\nGive a specific exception type for every group of similar errors\r\n\r\n**Describe alternatives you've considered**\r\n\r\nRely on the error message, using regex\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3138\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3137","id":1033363652,"node_id":"PR_kwDODunzps4tievk","number":3137,"title":"Fix numpy deprecation warning for ragged tensors","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-22T09:17:46Z","updated_at":"2021-10-22T16:04:15Z","closed_at":"2021-10-22T16:04:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3137","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3137","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3137.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3137.patch","merged_at":"2021-10-22T16:04:14Z"},"body":"Numpy shows a deprecation warning when we call `np.array` on a list of ragged tensors without specifying the `dtype`. 
If their shapes match, the tensors can be collated together, otherwise the resulting array should have `dtype=np.object`.\r\n\r\nFix #3084 \r\ncc @Rocketknight1 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3137\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3136","id":1033360396,"node_id":"PR_kwDODunzps4tieFi","number":3136,"title":"Fix script of Arabic Billion Words dataset to return all data","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-22T09:14:24Z","updated_at":"2021-10-22T13:28:41Z","closed_at":"2021-10-22T13:28:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3136","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3136","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3136.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3136.patch","merged_at":"2021-10-22T13:28:39Z"},"body":"The script has a bug and only parses and generates a portion of the entire dataset.\r\n\r\nThis PR fixes the loading script so that is properly parses the entire dataset. 
\r\n\r\nCurrent implementation generates the same number of examples as reported in the [original paper](https:\/\/arxiv.org\/abs\/1611.04033) for all configurations except for one:\r\n- For \"Youm7\" we generate more examples (1172136) than the ones reported by the paper (1025027)\r\n\r\n| | Number of examples | Number of examples according to the source |\r\n|:---------------|-------------------:|-----:|\r\n| Alittihad | 349342 |349342 |\r\n| Almasryalyoum | 291723 |291723 |\r\n| Almustaqbal | 446873 |446873 |\r\n| Alqabas | 817274 |817274 |\r\n| Echoroukonline | 139732 |139732 |\r\n| Ryiadh | 858188 | 858188 |\r\n| Sabanews | 92149 |92149 |\r\n| SaudiYoum | 888068 |888068 |\r\n| Techreen | 314597 |314597 |\r\n| Youm7 | 1172136 |1025027 |\r\n\r\nFix #3126.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3136\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3135","id":1033294299,"node_id":"I_kwDODunzps49ltHb","number":3135,"title":"Make inspect.get_dataset_config_names always return a non-empty list of configs","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on 
huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-10-22T08:02:50Z","updated_at":"2021-10-28T05:44:49Z","closed_at":"2021-10-28T05:44:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nCurrently, some datasets have a configuration, while others don't. 
It would be simpler for the user to always have configuration names to refer to\r\n\r\n**Describe the solution you'd like**\r\n\r\nIn that sense inspect.get_dataset_config_names should always return at least one configuration name, be it `default` or `Check___region_1` (for community datasets like `Check\/region_1`).\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/c5747a5e1dde2670b7f2ca6e79e2ffd99dff85af\/src\/datasets\/inspect.py#L161\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3135\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3134","id":1033251755,"node_id":"I_kwDODunzps49liur","number":3134,"title":"Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.11.0\/metrics\/rouge\/rouge.py","user":{"login":"yananchen1989","id":26405281,"node_id":"MDQ6VXNlcjI2NDA1Mjgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26405281?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yananchen1989","html_url":"https:\/\/github.com\/yananchen1989","followers_url":"https:\/\/api.github.com\/users\/yananchen1989\/followers","following_url":"https:\/\/api.github.com\/users\/yananchen1989\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yananchen1989\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yananchen1989\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yananchen1989\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yananchen1989\/orgs","repos_url":"https:\/\/api.github.com\/users\/yananchen1989\/repos","events_url":"https:\/\/api.github.com\/users\/yananchen1989\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yananchen1989\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-10-22T07:07:52Z","updated_at":"2022-01-19T14:02:32Z","closed_at":"2022-01-19T14:02:31Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"datasets version: 1.12.1\r\n\r\n`metric = datasets.load_metric('rouge')`\r\n\r\nThe error:\r\n\r\n> ConnectionError Traceback (most recent call last)\r\n> in \r\n> ----> 1 metric = datasets.load_metric('rouge')\r\n> \r\n> \/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py in load_metric(path, config_name, process_id, num_process, cache_dir, experiment_id, keep_in_memory, download_config, download_mode, script_version, **metric_init_kwargs)\r\n> 613 download_config=download_config,\r\n> 614 
download_mode=download_mode,\r\n> --> 615 dataset=False,\r\n> 616 )\r\n> 617 metric_cls = import_main_class(module_path, dataset=False)\r\n> \r\n> \/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, dynamic_modules_path, return_resolved_file_path, **download_kwargs)\r\n> 328 file_path = hf_github_url(path=path, name=name, dataset=dataset, version=script_version)\r\n> 329 try:\r\n> --> 330 local_path = cached_path(file_path, download_config=download_config)\r\n> 331 except FileNotFoundError:\r\n> 332 if script_version is not None:\r\n> \r\n> \/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py in cached_path(url_or_filename, download_config, **download_kwargs)\r\n> 296 use_etag=download_config.use_etag,\r\n> 297 max_retries=download_config.max_retries,\r\n> --> 298 use_auth_token=download_config.use_auth_token,\r\n> 299 )\r\n> 300 elif os.path.exists(url_or_filename):\r\n> \r\n> \/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag, max_retries, use_auth_token)\r\n> 603 raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\n> 604 _raise_if_offline_mode_is_enabled(f\"Tried to reach {url}\")\r\n> --> 605 raise ConnectionError(\"Couldn't reach {}\".format(url))\r\n> 606\r\n> 607 # Try a second time\r\n> \r\n> ConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.11.0\/metrics\/rouge\/rouge.py\r\n\r\n\r\nIs there any remedy to solve the connection issue ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3134\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3133","id":1032511710,"node_id":"PR_kwDODunzps4tftyZ","number":3133,"title":"Support Audio feature in streaming 
mode","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-21T13:37:57Z","updated_at":"2021-11-12T14:13:05Z","closed_at":"2021-11-12T14:13:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3133","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3133","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3133.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3133.patch","merged_at":"2021-11-12T14:13:04Z"},"body":"Fix #3132.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3133\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3132","id":1032505430,"node_id":"I_kwDODunzps49ishW","number":3132,"title":"Support Audio feature in streaming 
mode","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-21T13:32:18Z","updated_at":"2021-11-12
T14:13:04Z","closed_at":"2021-11-12T14:13:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, Audio feature is only supported for non-streaming datasets.\r\n\r\nDue to the large size of many speech datasets, we should also support Audio feature in streaming mode.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3132\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3131","id":1032309865,"node_id":"I_kwDODunzps49h8xp","number":3131,"title":"Add ADE20k","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-21T10:13:09Z","updated_at":"2021-12-08T12:01:00Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** ADE20k (actually it's called the MIT Scene Parsing Benchmark, it's actually a subset of ADE20k but a lot of authors still call it ADE20k)\r\n- **Description:** A semantic segmentation dataset, consisting of 150 classes.\r\n- **Paper:** http:\/\/people.csail.mit.edu\/bzhou\/publication\/scene-parse-camera-ready.pdf\r\n- **Data:** http:\/\/sceneparsing.csail.mit.edu\/\r\n- **Motivation:** I am currently adding Transformer-based semantic segmentation models that achieve SOTA on this dataset. 
It would be great to directly access this dataset using HuggingFace Datasets, in order to make example scripts in HuggingFace Transformers.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3131\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3130","id":1032299417,"node_id":"PR_kwDODunzps4tfBJU","number":3130,"title":"Create SECURITY.md","user":{"login":"zidingz","id":28839565,"node_id":"MDQ6VXNlcjI4ODM5NTY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28839565?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zidingz","html_url":"https:\/\/github.com\/zidingz","followers_url":"https:\/\/api.github.com\/users\/zidingz\/followers","following_url":"https:\/\/api.github.com\/users\/zidingz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zidingz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zidingz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zidingz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zidingz\/orgs","repos_url":"https:\/\/api.github.com\/users\/zidingz\/repos","events_url":"https:\/\/api.github.com\/users\/zidingz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zidingz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-21T10:03:03Z","updated_at":"2021-10-21T14:33:28Z","closed_at":"2021-10-21T14:31:50Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3130","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3130","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3130.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3130.patch","merged_at":null},"body":"To let the repository confirm feedback@huggingface.co as its security contact.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3130\/timeline","performed_via_github_app":null} 
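The issue 3132 record above asks for Audio feature support in streaming mode. As a hedged illustration of the usage that request implies (a sketch of the public `datasets` API, not the implementation; the dataset name and the `audio` column are placeholders):

```python
from datasets import load_dataset, Audio

# Stream a speech dataset instead of downloading it in full
# ("some_org/some_speech_dataset" and the "audio" column are placeholders).
ds = load_dataset("some_org/some_speech_dataset", split="train", streaming=True)

# Decode the audio column on the fly; 16 kHz is just an example rate.
ds = ds.cast_column("audio", Audio(sampling_rate=16_000))

first = next(iter(ds))
print(first["audio"]["sampling_rate"], len(first["audio"]["array"]))
```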
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3129","id":1032234167,"node_id":"PR_kwDODunzps4tezlA","number":3129,"title":"Support Audio feature for TAR archives in sequential access","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-10-21T08:56:51Z","updated_at":"2021-11-17T17:42:08Z","closed_at":"2021-11-17T17:42:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3129","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3129","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3129.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3129.patch","merged_at":"2021-11-17T17:42:07Z"},"body":"Add Audio feature support for TAR archived files in sequential access.\r\n\r\nFix #3128.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3129\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3128","id":1032201870,"node_id":"I_kwDODunzps49hiaO","number":3128,"title":"Support Audio feature for TAR archives in sequential 
access","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-21T08:23:01Z","updated_at":"2021-11-
17T17:42:07Z","closed_at":"2021-11-17T17:42:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, Audio feature accesses each audio file by their file path.\r\n\r\nHowever, streamed TAR archive files do not allow random access to their archived files.\r\n\r\nTherefore, we should enhance the Audio feature to support TAR archived files in sequential access.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3128\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3127","id":1032100613,"node_id":"I_kwDODunzps49hJsF","number":3127,"title":"datasets-cli: convertion of a tfds dataset to a huggingface one.","user":{"login":"vitalyshalumov","id":33824221,"node_id":"MDQ6VXNlcjMzODI0MjIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33824221?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vitalyshalumov","html_url":"https:\/\/github.com\/vitalyshalumov","followers_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/followers","following_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/orgs","repos_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/repos","events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-21T06:14:27Z","updated_at":"2021-10-27T11:36:05Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"### Discussed in https:\/\/github.com\/huggingface\/datasets\/discussions\/3079\r\n\r\n
\r\n\r\nOriginally posted by **vitalyshalumov** October 14, 2021<\/sup>\r\nI'm trying to convert a tfds dataset to a huggingface one.\r\n\r\nI've tried:\r\n\r\n1. datasets-cli convert --tfds_path ~\/tensorflow_datasets\/mnist\/3.0.1\/ --datasets_directory ~\/.cache\/huggingface\/datasets\/mnist\/3.0.1\/\r\n\r\n2. datasets-cli convert --tfds_path ~\/tensorflow_datasets\/mnist\/3.0.1\/ --datasets_directory ~\/.cache\/huggingface\/datasets\/\r\n\r\n\r\nand other permutations.\r\nThe script appears to be running and finishing without an error but when looking in the huggingface\/datasets\/ folder nothing is created.\r\n\r\n\r\n<\/div>","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3127\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3126","id":1032093055,"node_id":"I_kwDODunzps49hH1_","number":3126,"title":"\"arabic_billion_words\" dataset does not create the full dataset","user":{"login":"vitalyshalumov","id":33824221,"node_id":"MDQ6VXNlcjMzODI0MjIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33824221?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vitalyshalumov","html_url":"https:\/\/github.com\/vitalyshalumov","followers_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/followers","following_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/orgs","repos_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/repos","events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vitalyshalumov\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-10-21T06:02:38Z","updated_at":"2021-10-22T13:28:40Z","closed_at":"2021-10-22T13:28:40Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen running: \r\nraw_dataset = load_dataset('arabic_billion_words','Alittihad')\r\nthe correct dataset file is pulled from the url.\r\nBut, the generated dataset includes just a small portion of the data included in the file.\r\nThis is true for all other portions of the \"arabic_billion_words\" dataset ('Almasryalyoum',.....)\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nraw_dataset = load_dataset('arabic_billion_words','Alittihad')\r\n\r\n#The screen message\r\nDownloading and preparing dataset arabic_billion_words\/Alittihad (download: 332.13 MiB, generated: 20.62 MiB, post-processed: Unknown size, total: 352.74 MiB) \r\n\r\n## Expected results\r\nover 100K sentences\r\n\r\n## Actual results\r\nonly 11K sentences\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.0\r\n- Platform: Linux-5.8.0-63-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 
4.0.1\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3126\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3125","id":1032046666,"node_id":"PR_kwDODunzps4teNPC","number":3125,"title":"Add SLR83 to OpenSLR","user":{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-21T04:26:00Z","updated_at":"2021-10-22T20:10:05Z","closed_at":"2021-10-22T08:30:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3125","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3125","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3125.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3125.patch","merged_at":"2021-10-22T08:30:22Z"},"body":"The PR resolves #3119, adding SLR83 (UK and Ireland dialects) to the previously created OpenSLR dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3125\/timeline","performed_via_github_app":null} 
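PR 3125 above adds the SLR83 subset (UK and Ireland dialects) to the existing `openslr` dataset. A minimal usage sketch, assuming the new configuration is exposed under the name `SLR83` as the PR title suggests:

```python
from datasets import load_dataset

# Load the newly added UK/Ireland dialect subset of OpenSLR
# (the configuration name "SLR83" is taken from the PR title and is an assumption).
slr83 = load_dataset("openslr", "SLR83", split="train")
print(slr83)       # split size and column names
print(slr83[0])    # expected fields include the audio file and its transcription
```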
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3124","id":1031976286,"node_id":"PR_kwDODunzps4td-5w","number":3124,"title":"More efficient nested features encoding","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-21T01:55:31Z","updated_at":"2021-11-02T15:07:13Z","closed_at":"2021-11-02T11:04:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3124","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3124","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3124.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3124.patch","merged_at":"2021-11-02T11:04:04Z"},"body":"Nested encoding of features wastes a lot of time on operations which are effectively doing nothing when lists are used.\r\nFor example, if in the input we have a list of integers, `encoded_nested_example` will iterate over it and apply `encoded_nested_example` on every element even though it just return the int as is.\r\n\r\nA similar issue is handled at an earlier stage when casting pytorch\/tensorflow\/pandas objects to python lists\/numpy arrays:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/c98c23c4260edadab00f997d1a5d66b7f2e93ce9\/src\/datasets\/features\/features.py#L149-L156\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/c98c23c4260edadab00f997d1a5d66b7f2e93ce9\/src\/datasets\/features\/features.py#L212-L228\r\n\r\nIn this pull request I suggest to use the same approach in `encoded_nested_example`.\r\nIn my setup there was a major speedup with this change: loading the data was at least x4 faster. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3124\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3123","id":1031793207,"node_id":"I_kwDODunzps49f-o3","number":3123,"title":"Segmentation fault when loading datasets from file","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-10-20T20:16:11Z","updated_at":"2021-11-02T14:57:07Z","closed_at":"2021-11-02T14:57:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nCustom dataset loading sometimes segfaults and kills the process if chunks contain a variety of features\/\r\n\r\n## Steps to reproduce the bug\r\n\r\nDownload an example file:\r\n```\r\nwget https:\/\/gist.githubusercontent.com\/TevenLeScao\/11e2184394b3fa47d693de2550942c6b\/raw\/4232704d08fbfcaf93e5b51def9e5051507651ad\/tiny_kelm.jsonl\r\n```\r\nThen in Python:\r\n```\r\nimport datasets\r\ntiny_kelm = datasets.load_dataset(\"json\", data_files=\"tiny_kelm.jsonl\", chunksize=100000)\r\n```\r\n\r\n## Expected results\r\na `tiny_kelm` functional dataset\r\n\r\n## Actual results\r\n\u2620\ufe0f `Segmentation fault (core dumped)` \u2620\ufe0f\r\n\r\n## Environment info\r\n- `datasets` version: 1.14.0\r\n- Platform: Linux-5.11.0-38-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 5.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3123\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3122","id":1031787509,"node_id":"I_kwDODunzps49f9P1","number":3122,"title":"OSError with a custom dataset loading script","user":{"login":"suzanab","id":38602977,"node_id":"MDQ6VXNlcjM4NjAyOTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38602977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/suzanab","html_url":"https:\/\/github.com\/suzanab","followers_url":"https:\/\/api.github.com\/users\/suzanab\/followers","following_url":"https:\/\/api.github.com\/users\/suzanab\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/suzanab\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/suzanab\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/suzanab\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/suzanab\/orgs","repos_url":"https:\/\/api.github.com\/users\/suzanab\/repos","events_url":"https:\/\/api.github.com\/users\/suzanab\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/suzanab\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-10-20T20:08:39Z","updated_at":"2021-11-23T09:55:38Z","closed_at":"2021-11-23T09:55:38Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am getting an OS error when trying to load the newly uploaded dataset classla\/janes_tag. What puzzles me is that I have already uploaded a very similar dataset - classla\/reldi_hr - with no issues. The loading scripts for the two datasets are almost identical and they have the same directory structure, yet I am only getting an error with janes_tag.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndataset = datasets.load_dataset('classla\/janes_tag', split='validation')\r\n```\r\n\r\n## Expected results\r\nDataset correctly loaded.\r\n\r\n## Actual results\r\n\r\nTraceback (most recent call last):\r\n File \"C:\/mypath\/test.py\", line 91, in \r\n load_and_print('janes_tag')\r\n File \"C:\/mypath\/test.py\", line 32, in load_and_print\r\n dataset = datasets.load_dataset('classla\/{}'.format(ds_name), split='validation')\r\n File \"C:\\mypath\\venv\\lib\\site-packages\\datasets\\load.py\", line 1632, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"C:\\mypath\\venv\\lib\\site-packages\\datasets\\builder.py\", line 608, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"C:\\mypath\\venv\\lib\\site-packages\\datasets\\builder.py\", line 704, in _download_and_prepare\r\n ) from None\r\nOSError: Cannot find data file. 
\r\nOriginal error:\r\n[Errno 2] No such file or directory: 'C:\\\\mypath\\\\.cache\\\\huggingface\\\\datasets\\\\downloads\\\\2c9996e44bdc5af9c89bffb9e6d7a3e42fdb2f56bacab45de13b20f3032ea7ca\\\\data\\\\train_all.conllup'\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.14.0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.7.5\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3122\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3121","id":1031673115,"node_id":"PR_kwDODunzps4tc_6q","number":3121,"title":"Use huggingface_hub.HfApi to list datasets\/metrics","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-20T17:48:29Z","updated_at":"2021-11-05T11:45:08Z","closed_at":"2021-11-05T09:48:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3121","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3121","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3121.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3121.patch","merged_at":"2021-11-05T09:48:35Z"},"body":"Delete `datasets.inspect.HfApi` and use `huggingface_hub.HfApi` instead.\r\n\r\nWIP until https:\/\/github.com\/huggingface\/huggingface_hub\/pull\/429 is merged, then wait for the new release of `huggingface_hub`, update the `huggingface_hub` version in `setup.py` and merge this PR.\r\n\r\ncc: @lhoestq 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3121\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3120","id":1031574511,"node_id":"PR_kwDODunzps4tcril","number":3120,"title":"Correctly update metadata to preserve features when concatenating datasets with axis=1","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-20T15:54:58Z","updated_at":"2021-10-22T08:28:51Z","closed_at":"2021-10-21T14:50:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3120","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3120","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3120.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3120.patch","merged_at":"2021-10-21T14:50:21Z"},"body":"This PR correctly updates metadata to preserve higher-level feature types (e.g. `ClassLabel`) in `datasets.concatenate_datasets` when `axis=1`. Previously, we would delete the feature metadata in `datasets.concatenate_datasets` if `axis=1` and restore the feature types from the arrow table schema in `Dataset.__init__`. However, this approach only works for simple feature types (e.g. 
`Value`).\r\n\r\nFixes #3111","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3120\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3119","id":1031328044,"node_id":"I_kwDODunzps49eNEs","number":3119,"title":"Add OpenSLR 83 - Crowdsourced high-quality UK and Ireland English Dialect speech","user":{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false},"assignees":[{"login":"tyrius02","id":4561309,"node_id":"MDQ6VXNlcjQ1NjEzMDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4561309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tyrius02","html_url":"https:\/\/github.com\/tyrius02","followers_url":"https:\/\/api.github.com\/users\/tyrius02\/followers","following_url":"https:\/\/api.github.com\/users\/tyrius02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tyrius02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tyrius02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tyrius02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tyrius02\/orgs","repos_url":"https:\/\/api.github.com\/users\/tyrius02\/repos","events_url":"https:\/\/api.github.com\/users\/tyrius02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tyrius02\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-10-20T12:05:07Z","updated_at":"2021-10-22T19:00:52Z","closed_at":"2021-10-22T08:30:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *openslr**\r\n- **Description:** *Data set which contains male and female recordings of English from various dialects of the UK and Ireland.*\r\n- **Paper:** *https:\/\/www.openslr.org\/resources\/83\/about.html*\r\n- **Data:** *Eleven separate data files can be found via https:\/\/www.openslr.org\/resources\/83\/*\r\n- **Motivation:** *Increase english ASR data with UK and Irish dialects*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nThe *openslr* dataset already exists, this will add additional subset, *SLR83*.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3119\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3118","id":1031309549,"node_id":"PR_kwDODunzps4tb0LY","number":3118,"title":"Fix CI error at each release commit","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-20T11:44:38Z","updated_at":"2021-10-20T13:02:36Z","closed_at":"2021-10-20T13:02:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3118","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3118","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3118.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3118.patch","merged_at":"2021-10-20T13:02:35Z"},"body":"Fix test_load_dataset_canonical at release commit.\r\n\r\nFix #3117.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3118\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3117","id":1031308083,"node_id":"I_kwDODunzps49eIMz","number":3117,"title":"CI error at each release 
commit","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-20T11:42:53Z","updated_at":"2021-10-20T13:02:35Z","
closed_at":"2021-10-20T13:02:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"After 1.12.0, there is a recurrent CI error at each release commit: https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8289\/workflows\/665d954d-e409-4602-8202-e678594d2946\/jobs\/51110\r\n\r\n```\r\n____________________ LoadTest.test_load_dataset_canonical _____________________\r\n[gw0] win32 -- Python 3.6.8 C:\\tools\\miniconda3\\python.exe\r\n\r\nself = \r\n\r\n def test_load_dataset_canonical(self):\r\n scripts_version = os.getenv(\"HF_SCRIPTS_VERSION\", SCRIPTS_VERSION)\r\n with self.assertRaises(FileNotFoundError) as context:\r\n datasets.load_dataset(\"_dummy\")\r\n self.assertIn(\r\n f\"https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/{scripts_version}\/datasets\/_dummy\/_dummy.py\",\r\n> str(context.exception),\r\n )\r\nE AssertionError: 'https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.14.0\/datasets\/_dummy\/_dummy.py' not found in \"Couldn't find a dataset script at C:\\\\Users\\\\circleci\\\\datasets\\\\_dummy\\\\_dummy.py or any data file in the same directory. Couldn't find '_dummy' on the Hugging Face Hub either: FileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/_dummy\/_dummy.py\"\r\n\r\ntests\\test_load.py:358: AssertionError\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3117\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3116","id":1031270611,"node_id":"PR_kwDODunzps4tbr6g","number":3116,"title":"Update doc links to point to new 
docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-20T11:00:47Z","updated_at":"2021-10-22T08:29:28Z","closed_at":"2021-10-22T08:26:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3116","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3116","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3116.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3116.patch","merged_at":"2021-10-22T08:26:45Z"},"body":"This PR:\r\n* updates the README links and the ADD_NEW_DATASET template to point to the new docs (the new docs don't have a section with the list of all the possible features, so I added that info to the `Features` docstring, which is then referenced in the ADD_NEW_DATASET template)\r\n* fixes some broken links in the `.rst` files (fixed with the `make linkcheck` tool)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3116\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3115","id":1030737524,"node_id":"PR_kwDODunzps4tZ-Vr","number":3115,"title":"Fill in dataset card for NCBI disease 
dataset","user":{"login":"edugp","id":17855740,"node_id":"MDQ6VXNlcjE3ODU1NzQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17855740?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/edugp","html_url":"https:\/\/github.com\/edugp","followers_url":"https:\/\/api.github.com\/users\/edugp\/followers","following_url":"https:\/\/api.github.com\/users\/edugp\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/edugp\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/edugp\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/edugp\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/edugp\/orgs","repos_url":"https:\/\/api.github.com\/users\/edugp\/repos","events_url":"https:\/\/api.github.com\/users\/edugp\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/edugp\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-19T20:57:05Z","updated_at":"2021-10-22T08:25:07Z","closed_at":"2021-10-22T08:25:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3115","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3115","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3115.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3115.patch","merged_at":"2021-10-22T08:25:07Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3115\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3114","id":1030693130,"node_id":"I_kwDODunzps49byEK","number":3114,"title":"load_from_disk in DatasetsDict\/Dataset not working with PyArrowHDFS wrapper implementing 
fsspec.spec.AbstractFileSystem","user":{"login":"francisco-perez-sorrosal","id":918006,"node_id":"MDQ6VXNlcjkxODAwNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/918006?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal","html_url":"https:\/\/github.com\/francisco-perez-sorrosal","followers_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/followers","following_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/orgs","repos_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/repos","events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-19T20:01:45Z","updated_at":"2021-11-19T00:35:23Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nPassing a PyArrowHDFS implementation of fsspec.spec.AbstractFileSystem (in the `fs` param required by `load_from_disk` methods in `DatasetDict` (in datasets_dict.py) and `Dataset` (in arrow_dataset.py) results in an error when calling the download method in the `fs` parameter.\r\n\r\n\r\n## Steps to reproduce the bug\r\n\r\nThe documentation for the `fs` parameter states:\r\n\r\n```\r\nfs (:class:`~filesystems.S3FileSystem` or ``fsspec.spec.AbstractFileSystem``, optional, default ``None``):\r\n Instance of the remote filesystem used to download the files from.\r\n```\r\n\r\n`PyArrowHDFS` from [fsspec](https:\/\/filesystem-spec.readthedocs.io\/en\/latest\/_modules\/fsspec\/implementations\/hdfs.html) implements `fsspec.spec.AbstractFileSystem`. 
However, when using it as shown below, I get an error.\r\n\r\n```python\r\nfrom fsspec.implementations.hdfs import PyArrowHDFS\r\n...\r\ntransformed_corpus_path = \"\/user\/my_user\/clickbait\/transformed_ds\/\"\r\nfs = PyArrowHDFS(host, port, user, kerb_ticket=kerb_ticket)\r\ndss = DatasetDict.load_from_disk(transformed_corpus_path, fs, True)\r\n```\r\n\r\n## Expected results\r\n\r\nPrior to loading from disk, I have managed to successfully store in HDFS the data and meta-information of a DatasetDict by doing:\r\n```python\r\ntransformed_corpus_path = \"\/user\/my_user\/clickbait\/transformed_ds\/\"\r\nfs = PyArrowHDFS(host, port, user, kerb_ticket=kerb_ticket)\r\nmy_datasets.save_to_disk(transformed_corpus_path, fs=fs)\r\n```\r\n\r\nAs I have 3 datasets in the DatasetDict named `my_datasets`, the previous Python code creates the following contents in HDFS:\r\n\r\n```sh\r\n$ hadoop fs -ls \"\/user\/my_user\/clickbait\/transformed_ds\/\"\r\nFound 4 items\r\n-rw------- 3 my_user users 43 2021-10-19 03:08 \/user\/my_user\/clickbait\/transformed_ds\/dataset_dict.json\r\ndrwx------ - my_user users 0 2021-10-19 03:08 \/user\/my_user\/clickbait\/transformed_ds\/test\r\ndrwx------ - my_user users 0 2021-10-19 03:08 \/user\/my_user\/clickbait\/transformed_ds\/train\r\ndrwx------ - my_user users 0 2021-10-19 03:08 \/user\/my_user\/clickbait\/transformed_ds\/validation\r\n```\r\n\r\nI would expect to recover on `dss` the Arrow-backed datasets I previously saved in HDFS by calling the `save_to_disk` method on the `DatasetDict` object when invoking `DatasetDict.load_from_disk(...)` as described above. \r\n\r\n## Actual results\r\n\r\nHowever, when trying to recover the saved datasets, I get this error:\r\n\r\n```\r\n...\r\n File \"\/home\/fperez\/dev\/neuromancer\/neuromancer\/corpus.py\", line 186, in load_transformed_corpus_from_disk\r\n dss = DatasetDict.load_from_disk(transformed_corpus_path, fs, True)\r\n File \"\/home\/fperez\/anaconda3\/envs\/neuromancer\/lib\/python3.9\/site-packages\/datasets\/dataset_dict.py\", line 748, in load_from_disk\r\n dataset_dict[k] = Dataset.load_from_disk(dataset_dict_split_path, fs, keep_in_memory=keep_in_memory)\r\n File \"\/home\/fperez\/anaconda3\/envs\/neuromancer\/lib\/python3.9\/site-packages\/datasets\/arrow_dataset.py\", line 1048, in load_from_disk\r\n fs.download(src_dataset_path, dataset_path.as_posix(), recursive=True)\r\n File \"pyarrow\/_hdfsio.pyx\", line 438, in pyarrow._hdfsio.HadoopFileSystem.download\r\nTypeError: download() got an unexpected keyword argument 'recursive'\r\n```\r\n\r\nExamining the [signature of the download method in pyarrow 5.0.0](https:\/\/github.com\/apache\/arrow\/blob\/54d2bd89c99df72fa091b025452f85dd5d88e3cf\/python\/pyarrow\/_hdfsio.pyx#L438) we can see that there's no `recursive` parameter:\r\n\r\n```python\r\n def download(self, path, stream, buffer_size=None):\r\n with self.open(path, 'rb') as f:\r\n f.download(stream, buffer_size=buffer_size)\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.13.3\r\n- Platform: Linux-3.10.0-1160.15.2.el7.x86_64-x86_64-with-glibc2.33\r\n- Python version: 3.9.7\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3114\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3113","id":1030667547,"node_id":"I_kwDODunzps49br0b","number":3113,"title":"Loading Data from HDF files","user":{"login":"FeryET","id":30388648,"node_id":"MDQ6VXNlcjMwMzg4NjQ4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30388648?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/FeryET","html_url":"https:\/\/github.com\/FeryET","followers_url":"https:\/\/api.github.com\/users\/FeryET\/followers","following_url":"https:\/\/api.github.com\/users\/FeryET\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/FeryET\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/FeryET\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/FeryET\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/FeryET\/orgs","repos_url":"https:\/\/api.github.com\/users\/FeryET\/repos","events_url":"https:\/\/api.github.com\/users\/FeryET\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/FeryET\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-19T19:26:46Z","updated_at":"2021-10-19T19:42:48Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nMore often than not I come along big HDF datasets, and currently there is no straight forward way to feed them to a dataset.\r\n\r\n**Describe the solution you'd like**\r\nI would love to see a `from_h5` method that gets an interface implemented by the user on how items are extracted from dataset (in case of multiple datasets containing elements like arrays and metadata and etc).\r\n\r\n**Describe alternatives you've considered**\r\nCurrently I manually load hdf files using `h5py` and implement PyTorch dataset interface. For small h5 files I load them into a pandas dataframe and use `from_pandas` function in the `datasets` package to load them, but for big datasets this is not feasible.\r\n\r\n**Additional context**\r\nHDF files are widespread throughout different domains and are one of the go to's for many researchers\/scientists\/engineers who work with numerical data. 
Given `datasets`' usecases have outgrown NLP use cases, it will make a lot of sense focusing on things like supporting HDF files.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3113\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3112","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3112\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3112\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3112\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3112","id":1030613083,"node_id":"I_kwDODunzps49behb","number":3112,"title":"OverflowError: There was an overflow in the . Try to reduce writer_batch_size to have batches smaller than 2GB","user":{"login":"BenoitDalFerro","id":69694610,"node_id":"MDQ6VXNlcjY5Njk0NjEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69694610?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BenoitDalFerro","html_url":"https:\/\/github.com\/BenoitDalFerro","followers_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/followers","following_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/orgs","repos_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/repos","events_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-19T18:21:41Z","updated_at":"2021-10-19T18:52:29Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nDespite having batches way under 2Gb when running `datasets.map()`, after processing correctly the data of the first batch without fuss and irrespective of writer_batch_size (say 2,4,8,16,32,64 and 128 in my case), it returns the following error :\r\n\r\n> OverflowError: There was an overflow in the . 
Try to reduce writer_batch_size to have batches smaller than 2GB\r\n\r\nNote that I always run `batch_size=writer_batch_size` :\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndatasets.map(lambda example : {\"column_name\" : function(arguments)}, batched=False, remove_columns = datasets.column_names, batch_size=batch_size, writer_batch_size=batch_size, disable_nullable=True, num_proc=None, desc=\"blablabla\")\r\n```\r\n## Introspecting CUDA memory during bug\r\n\r\nPlaced within `function(arguments)` the following statement to introspect memory usage, merely a little over 1\/4 of 2Gb\r\n`print(torch.cuda.memory_summary(device=device, abbreviated=False))`\r\n\r\n> |===========================================================================|\r\n | PyTorch CUDA memory summary, device ID 0 |\r\n |---------------------------------------------------------------------------|\r\n | CUDA OOMs: 0 | cudaMalloc retries: 0 |\r\n |===========================================================================|\r\n | Metric | Cur Usage | Peak Usage | Tot Alloc | Tot Freed |\r\n |---------------------------------------------------------------------------|\r\n | Allocated memory | 541418 KB | 545725 KB | 555695 KB | 14276 KB |\r\n | from large pool | 540672 KB | 544431 KB | 544431 KB | 3759 KB |\r\n | from small pool | 746 KB | 1714 KB | 11264 KB | 10517 KB |\r\n |---------------------------------------------------------------------------|\r\n | Active memory | 541418 KB | 545725 KB | 555695 KB | 14276 KB |\r\n | from large pool | 540672 KB | 544431 KB | 544431 KB | 3759 KB |\r\n | from small pool | 746 KB | 1714 KB | 11264 KB | 10517 KB |\r\n |---------------------------------------------------------------------------|\r\n | GPU reserved memory | 598016 KB | 598016 KB | 598016 KB | 0 B |\r\n | from large pool | 595968 KB | 595968 KB | 595968 KB | 0 B |\r\n | from small pool | 2048 KB | 2048 KB | 2048 KB | 0 B |\r\n |---------------------------------------------------------------------------|\r\n | Non-releasable memory | 36117 KB | 52292 KB | 274275 KB | 238158 KB |\r\n| from large pool | 34816 KB | 51537 KB | 261713 KB | 226897 KB |\r\n | from small pool | 1301 KB | 2045 KB | 12562 KB | 11261 KB |\r\n |---------------------------------------------------------------------------|\r\n | Allocations | 198 | 224 | 478 | 280 |\r\n | from large pool | 74 | 75 | 75 | 1 |\r\n | from small pool | 124 | 150 | 403 | 279 |\r\n |---------------------------------------------------------------------------|\r\n | Active allocs | 198 | 224 | 478 | 280 |\r\n | from large pool | 74 | 75 | 75 | 1 |\r\n | from small pool | 124 | 150 | 403 | 279 |\r\n |---------------------------------------------------------------------------|\r\n | GPU reserved segments | 21 | 21 | 21 | 0 |\r\n | from large pool | 20 | 20 | 20 | 0 |\r\n | from small pool | 1 | 1 | 1 | 0 |\r\n |---------------------------------------------------------------------------|\r\n | Non-releasable allocs | 18 | 23 | 166 | 148 |\r\n | from large pool | 17 | 18 | 19 | 2 |\r\n | from small pool | 1 | 6 | 147 | 146 |\r\n |===========================================================================|\r\n\r\n## Expected results\r\nEfficiently process the datasets and write it down to disk.\r\n\r\n## Actual results\r\n--------------------------------------------------------------------------\r\nOverflowError Traceback (most recent call last)\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_dataset.py in _map_single(self, function, with_indices, input_columns, 
batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, disable_tqdm, desc, cache_only)\r\n 2390 else:\r\n-> 2391 writer.write(example)\r\n 2392 else:\r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_writer.py in write(self, example, key, writer_batch_size)\r\n 367 \r\n--> 368 self.write_examples_on_file()\r\n 369 \r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_writer.py in write_examples_on_file(self)\r\n 316 if not isinstance(pa_array[0], pa.lib.FloatScalar):\r\n--> 317 raise OverflowError(\r\n 318 \"There was an overflow in the {}. Try to reduce writer_batch_size to have batches smaller than 2GB\".format(\r\n\r\nOverflowError: There was an overflow in the . Try to reduce writer_batch_size to have batches smaller than 2GB\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nOverflowError Traceback (most recent call last)\r\n~\\AppData\\Local\\Temp\/ipykernel_16268\/2456940807.py in \r\n 3 #tracker = OfflineEmissionsTracker(country_iso_code=\"FRA\", project_name='xxx'+time_stamp,output_dir='.\/codecarbon')\r\n 4 #tracker.start()\r\n----> 5 process_datasets(source_datasets_paths, dataset_dir, LM_tokenizer, LMhead_model, datasets_selection=['wikipedia'], from_scratch=True,\r\n 6 clean_sentences=False, negative_sampling=False, translate=False, tokenize=False, generate_embeddings=True, concatenate_embeddings=False,\r\n 7 max_sample=10000, padding='do_not_pad', truncation=True, cpu_batch_size=1000, gpu_batch_size=2, cpu_writer_batch_size=1000, gpu_writer_batch_size=2, disable_nullable=True, num_proc=None) #\r\n\r\n~\\xxx\\xxx.py in process_datasets(source_datasets_paths, dataset_dir, LM_tokenizer, LMhead_model, datasets_selection, from_scratch, clean_sentences, translate, negative_sampling, tokenize, generate_embeddings, concatenate_embeddings, max_sample, padding, truncation, cpu_batch_size, gpu_batch_size, cpu_writer_batch_size, gpu_writer_batch_size, disable_nullable, num_proc)\r\n 481 for column in tqdm(dataset.column_names, desc=f'Processing column', leave=False):\r\n 482 if \"xxx_\" in column:\r\n--> 483 dataset = dataset.map(lambda example :\r\n 484 {\"embeddings_\"+str(column).replace(\"translated_\",\"\"):function(input_ids=example[column],\r\n 485 token_type_ids=example[column.replace(\"input_ids\",\"token_type_ids\")],\r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint, desc)\r\n 2034 \r\n 2035 if num_proc is None or num_proc == 1:\r\n-> 2036 return self._map_single(\r\n 2037 function=function,\r\n 2038 with_indices=with_indices,\r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_dataset.py in wrapper(*args, **kwargs)\r\n 501 self: \"Dataset\" = kwargs.pop(\"self\")\r\n 502 # apply actual function\r\n--> 503 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 504 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 505 for dataset in datasets:\r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_dataset.py in wrapper(*args, **kwargs)\r\n 468 }\r\n 469 # apply actual function\r\n--> 470 out: 
Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 471 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 472 # re-apply format to the output\r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\fingerprint.py in wrapper(*args, **kwargs)\r\n 404 # Call actual function\r\n 405 \r\n--> 406 out = func(self, *args, **kwargs)\r\n 407 \r\n 408 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, disable_tqdm, desc, cache_only)\r\n 2425 if update_data:\r\n 2426 if writer is not None:\r\n-> 2427 writer.finalize()\r\n 2428 if tmp_file is not None:\r\n 2429 tmp_file.close()\r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_writer.py in finalize(self, close_stream)\r\n 440 # Re-intializing to empty list for next batch\r\n 441 self.hkey_record = []\r\n--> 442 self.write_examples_on_file()\r\n 443 if self.pa_writer is None:\r\n 444 if self._schema is not None:\r\n\r\n~\\anaconda3\\envs\\xxx\\lib\\site-packages\\datasets\\arrow_writer.py in write_examples_on_file(self)\r\n 315 # This check fails with FloatArrays with nans, which is not what we want, so account for that:\r\n 316 if not isinstance(pa_array[0], pa.lib.FloatScalar):\r\n--> 317 raise OverflowError(\r\n 318 \"There was an overflow in the {}. Try to reduce writer_batch_size to have batches smaller than 2GB\".format(\r\n 319 type(pa_array)\r\n\r\nOverflowError: There was an overflow in the . 
Try to reduce writer_batch_size to have batches smaller than 2GB\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.13.3\r\n- Platform: Windows-10-10.0.19042-SP0\r\n- Python version: 3.8.11\r\n- PyArrow version: 3.0.0\r\n\r\n##Next steps\r\nTesting on Linux.\r\n@albertvillanova \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3112\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3112\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3111","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3111\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3111\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3111\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3111","id":1030598983,"node_id":"I_kwDODunzps49bbFH","number":3111,"title":"concatenate_datasets removes ClassLabel typing.","user":{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-10-19T18:05:31Z","updated_at":"2021-10-21T14:50:21Z","closed_at":"2021-10-21T14:50:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nWhen concatenating two datasets, we lose typing of ClassLabel columns.\r\n\r\nI can work on this if this is a legitimate bug,\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\nfrom datasets import Dataset, ClassLabel, Value, concatenate_datasets\r\n\r\nDS_LEN = 100\r\nmy_dataset = Dataset.from_dict(\r\n {\r\n \"sentence\": [f\"{chr(i % 10)}\" for i in range(DS_LEN)],\r\n \"label\": [i % 2 for i in range(DS_LEN)]\r\n }\r\n)\r\nmy_predictions = Dataset.from_dict(\r\n {\r\n \"pred\": [(i + 1) % 2 for i in range(DS_LEN)]\r\n }\r\n)\r\n\r\nmy_dataset = my_dataset.cast(datasets.Features({\"sentence\": Value(\"string\"), \"label\": ClassLabel(2, names=[\"POS\", \"NEG\"])}))\r\nprint(\"Original\")\r\nprint(my_dataset)\r\nprint(my_dataset.features)\r\n\r\n\r\nconcat_ds = concatenate_datasets([my_dataset, my_predictions], axis=1)\r\nprint(\"Concatenated\")\r\nprint(concat_ds)\r\nprint(concat_ds.features)\r\n\r\n\r\n```\r\n\r\n## Expected results\r\nThe features of `concat_ds` should contain ClassLabel.\r\n\r\n## Actual results\r\n\r\nOn master, I get: \r\n```\r\n{'sentence': Value(dtype='string', id=None), 'label': Value(dtype='int64', id=None), 'pred': Value(dtype='int64', id=None)}\r\n```\r\n\r\n## 
Environment info\r\n- `datasets` version: 1.14.1.dev0\r\n- Platform: macOS-10.15.7-x86_64-i386-64bit\r\n- Python version: 3.8.11\r\n- PyArrow version: 4.0.1\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3111\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3111\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3110","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3110\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3110\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3110\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3110","id":1030558484,"node_id":"PR_kwDODunzps4tZakS","number":3110,"title":"Stream TAR-based dataset using iter_archive","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-19T17:16:24Z","updated_at":"2021-11-05T17:48:49Z","closed_at":"2021-11-05T17:48:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3110","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3110","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3110.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3110.patch","merged_at":"2021-11-05T17:48:48Z"},"body":"I converted all the dataset based on TAR archive to use iter_archive instead, so that they can be streamable.\r\n\r\nIt means that around 80 datasets become streamable :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3110\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3110\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3109","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3109\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3109\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3109\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3109","id":1030543284,"node_id":"PR_kwDODunzps4tZXmC","number":3109,"title":"Update BibTeX entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-19T16:59:31Z","updated_at":"2021-10-19T17:13:28Z","closed_at":"2021-10-19T17:13:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3109","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3109","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3109.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3109.patch","merged_at":"2021-10-19T17:13:27Z"},"body":"Update BibTeX entry.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3109\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3109\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3108","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3108\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3108\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3108\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3108","id":1030405618,"node_id":"PR_kwDODunzps4tY8ID","number":3108,"title":"Add Google BLEU (aka GLEU) 
metric","user":{"login":"slowwavesleep","id":44175589,"node_id":"MDQ6VXNlcjQ0MTc1NTg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44175589?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slowwavesleep","html_url":"https:\/\/github.com\/slowwavesleep","followers_url":"https:\/\/api.github.com\/users\/slowwavesleep\/followers","following_url":"https:\/\/api.github.com\/users\/slowwavesleep\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slowwavesleep\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slowwavesleep\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slowwavesleep\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slowwavesleep\/orgs","repos_url":"https:\/\/api.github.com\/users\/slowwavesleep\/repos","events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-19T14:48:38Z","updated_at":"2021-10-25T14:07:04Z","closed_at":"2021-10-25T14:07:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3108","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3108","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3108.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3108.patch","merged_at":"2021-10-25T14:07:04Z"},"body":"This PR adds the NLTK implementation of Google BLEU metric. This is also a part of an effort to resolve an unfortunate naming collision between GLEU for machine translation and GLEU for grammatical error correction.\r\nI used [this page](https:\/\/huggingface.co\/docs\/datasets\/add_metric.html) for reference. 
Please, point me to the right direction if I missed anything.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3108\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3108\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3107","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3107\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3107\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3107\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3107","id":1030357527,"node_id":"PR_kwDODunzps4tYyhF","number":3107,"title":"Add paper BibTeX citation","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-19T14:08:11Z","updated_at":"2021-10-19T14:26:22Z","closed_at":"2021-10-19T14:26:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3107","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3107","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3107.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3107.patch","merged_at":"2021-10-19T14:26:21Z"},"body":"Add paper BibTeX citation to README file.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3107\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3107\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3106","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3106\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3106\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3106\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3106","id":1030112473,"node_id":"PR_kwDODunzps4tYA6i","number":3106,"title":"Fix URLs in blog_authorship_corpus dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-19T10:06:05Z","updated_at":"2021-10-19T12:50:40Z","closed_at":"2021-10-19T12:50:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3106","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3106","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3106.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3106.patch","merged_at":"2021-10-19T12:50:39Z"},"body":"After contacting the authors of the paper \"Effects of Age and Gender on Blogging\", they confirmed:\r\n- the old URLs are no longer valid\r\n- there are alternative host URLs\r\n\r\nFix #3091.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3106\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3106\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3105","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3105\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3105\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3105\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3105","id":1029098843,"node_id":"I_kwDODunzps49Vs1b","number":3105,"title":"download_mode=`force_redownload` does not work on removed 
datasets","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-18T13:12:38Z","updated_at":"2021-10-22T09:36:10Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nIf a cached dataset is removed from the library, I don't see how to delete it programmatically. 
I thought that using `force_redownload` would try to refresh the cache, then raise an exception, but it reuses the cache instead.\r\n\r\n## Steps to reproduce the bug\r\n\r\n_requires to already have `wit` in the cache_: see https:\/\/github.com\/huggingface\/datasets\/pull\/2981\r\n\r\n```python\r\nimport datasets as ds\r\ndataset = ds.load_dataset(\"wit\", split=\"train\", download_mode='force_redownload')\r\n```\r\n\r\n## Expected results\r\n\r\nIt should raise an exception, since the dataset does not exist anymore.\r\n\r\n## Actual results\r\n\r\nIt uses the cached result\r\n\r\n```\r\nUsing the latest cached version of the module from \/home\/slesage\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wit\/107afbffd48e058b19101bddc47fbee25fa68eb6d50a733e262875f1285a5171 (last modified on Wed Sep 29 08:21:10 2021) since it couldn't be found locally at wit, or remotely on the Hugging Face Hub.\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.13.4.dev0\r\n- Platform: Linux-5.11.0-1019-aws-x86_64-with-glibc2.31\r\n- Python version: 3.9.6\r\n- PyArrow version: 4.0.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3105\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3105\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3104","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3104\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3104\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3104\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3104","id":1029080412,"node_id":"I_kwDODunzps49VoVc","number":3104,"title":"Missing Zenodo 1.13.3 release","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-10-18T12:57:18Z","updated_at":"2021-10-22T13:22:25Z","closed_at":"2021-10-22T13:22:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"After `datasets` 1.13.3 release, this does not appear in Zenodo releases: https:\/\/zenodo.org\/record\/5570305\r\n\r\nTODO:\r\n- [x] Contact Zenodo support\r\n- [x] Check it is fixed","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3104\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3104\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3103","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3103\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3103\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3103\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3103","id":1029069310,"node_id":"PR_kwDODunzps4tUzJQ","number":3103,"title":"Fix project description in 
PyPI","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-18T12:47:29Z","updated_at":"2021-10-18T12:59:57Z","closed_at":"2021-10-18T12:59:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3103","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3103","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3103.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3103.patch","merged_at":"2021-10-18T12:59:56Z"},"body":"Fix project description appearing in PyPI, so that it contains the content of the README.md file (like transformers).\r\n\r\nCurrently, `datasets` project description appearing in PyPI shows the release instructions addressed to core maintainers: https:\/\/pypi.org\/project\/datasets\/1.13.3\/\r\n\r\nFix #3102.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3103\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3103\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3102","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3102\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3102\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3102\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3102","id":1029067062,"node_id":"I_kwDODunzps49VlE2","number":3102,"title":"Unsuitable project description in 
PyPI","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-18T12:45:00Z","updated_at":"2021-10-18T12:59:56Z","closed_at":"2021-10-18T12:59:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, `datasets` project description appearing in PyPI shows the release 
instructions addressed to core maintainers: https:\/\/pypi.org\/project\/datasets\/1.13.3\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3102\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3102\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3101","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3101\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3101\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3101\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3101","id":1028966968,"node_id":"PR_kwDODunzps4tUelE","number":3101,"title":"Update SUPERB to use Audio features","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-18T11:05:18Z","updated_at":"2021-10-18T12:33:54Z","closed_at":"2021-10-18T12:06:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3101","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3101","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3101.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3101.patch","merged_at":"2021-10-18T12:06:46Z"},"body":"This is the same dataset refresh as the other Audio ones: https:\/\/github.com\/huggingface\/datasets\/pull\/3081\r\n\r\ncc @patrickvonplaten ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3101\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3101\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3100","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3100\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3100\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3100\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3100","id":1028738180,"node_id":"PR_kwDODunzps4tTwpn","number":3100,"title":"Replace FSTimeoutError with parent TimeoutError","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-18T07:37:09Z","updated_at":"2021-10-18T07:51:55Z","closed_at":"2021-10-18T07:51:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3100","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3100","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3100.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3100.patch","merged_at":"2021-10-18T07:51:54Z"},"body":"PR #3050 introduced a dependency on `fsspec.FSTiemoutError`. 
Note that this error only exists from `fsspec` version `2021.06.0` (June 2021).\r\n\r\nTo fix #3097, there are 2 alternatives:\r\n- Either pinning `fsspec` to versions newer or equal to `2021.06.0`\r\n- Or replacing `fsspec.FSTimeoutError` wth its parent `asyncio.TimeoutError`, which exists from Python 3.8.0 (Sep 2018).\r\n\r\nThis PR implements the second approach.\r\n\r\nFix #3097.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3100\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3100\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3099","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3099\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3099\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3099\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3099","id":1028338078,"node_id":"I_kwDODunzps49SzGe","number":3099,"title":"AttributeError: module 'huggingface_hub.hf_api' has no attribute 'DatasetInfo'","user":{"login":"JTWang2000","id":49268567,"node_id":"MDQ6VXNlcjQ5MjY4NTY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49268567?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JTWang2000","html_url":"https:\/\/github.com\/JTWang2000","followers_url":"https:\/\/api.github.com\/users\/JTWang2000\/followers","following_url":"https:\/\/api.github.com\/users\/JTWang2000\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JTWang2000\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JTWang2000\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JTWang2000\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JTWang2000\/orgs","repos_url":"https:\/\/api.github.com\/users\/JTWang2000\/repos","events_url":"https:\/\/api.github.com\/users\/JTWang2000\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JTWang2000\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-10-17T14:17:47Z","updated_at":"2021-11-09T16:42:29Z","closed_at":"2021-11-09T16:42:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen using `pip install datasets`\r\nor use `conda install -c huggingface -c conda-forge datasets`\r\ncannot install datasets\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"sst\", \"default\")\r\n```\r\n\r\n## Actual results\r\n---------------------------------------------------------------------------\r\nAttributeError Traceback (most recent call last)\r\n in \r\n 1 import torch\r\n 2 import transformers\r\n----> 3 from datasets import load_dataset\r\n 4 \r\n 5 dataset = load_dataset(\"sst\", \"default\")\r\n\r\n~\/miniforge3\/envs\/actor\/lib\/python3.8\/site-packages\/datasets\/__init__.py in \r\n 35 from .arrow_reader import ArrowReader, ReadInstruction\r\n 36 from .arrow_writer import ArrowWriter\r\n---> 37 from .builder import ArrowBasedBuilder, BeamBasedBuilder, BuilderConfig, DatasetBuilder, GeneratorBasedBuilder\r\n 38 from .combine import interleave_datasets\r\n 39 from .dataset_dict import DatasetDict, IterableDatasetDict\r\n\r\n~\/miniforge3\/envs\/actor\/lib\/python3.8\/site-packages\/datasets\/builder.py in \r\n 42 )\r\n 43 
from .arrow_writer import ArrowWriter, BeamWriter\r\n---> 44 from .data_files import DataFilesDict, _sanitize_patterns\r\n 45 from .dataset_dict import DatasetDict, IterableDatasetDict\r\n 46 from .fingerprint import Hasher\r\n\r\n~\/miniforge3\/envs\/actor\/lib\/python3.8\/site-packages\/datasets\/data_files.py in \r\n 118 \r\n 119 def _exec_patterns_in_dataset_repository(\r\n--> 120 dataset_info: huggingface_hub.hf_api.DatasetInfo,\r\n 121 patterns: List[str],\r\n 122 allowed_extensions: Optional[list] = None,\r\n\r\nAttributeError: module 'huggingface_hub.hf_api' has no attribute 'DatasetInfo'\r\n\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.13.3\r\n- Platform: macOS-11.3.1-arm64-arm-64bit\r\n- Python version: 3.8.10\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3099\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3099\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3098","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3098\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3098\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3098\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3098","id":1028210790,"node_id":"PR_kwDODunzps4tSRSZ","number":3098,"title":"Push to hub capabilities for `Dataset` and `DatasetDict`","user":{"login":"LysandreJik","id":30755778,"node_id":"MDQ6VXNlcjMwNzU1Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30755778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LysandreJik","html_url":"https:\/\/github.com\/LysandreJik","followers_url":"https:\/\/api.github.com\/users\/LysandreJik\/followers","following_url":"https:\/\/api.github.com\/users\/LysandreJik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LysandreJik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LysandreJik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LysandreJik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LysandreJik\/orgs","repos_url":"https:\/\/api.github.com\/users\/LysandreJik\/repos","events_url":"https:\/\/api.github.com\/users\/LysandreJik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LysandreJik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-10-17T04:12:44Z","updated_at":"2021-12-08T16:04:50Z","closed_at":"2021-11-24T11:25:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3098","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3098","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3098.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3098.patch","merged_at":"2021-11-24T11:25:36Z"},"body":"This PR implements a `push_to_hub` method on `Dataset` and `DatasetDict`. 
This does not currently work in `IterableDatasetDict` nor `IterableDataset` as those are simple dicts and I would like your opinion on how you would like to implement this before going ahead and doing it.\r\n\r\nThis implementation needs to be used with the following `huggingface_hub` branch in order to work correctly: https:\/\/github.com\/huggingface\/huggingface_hub\/pull\/415\r\n\r\n### Implementation\r\n\r\nThe `push_to_hub` API is entirely based on HTTP requests rather than a git-based workflow:\r\n- This allows pushing changes without firstly cloning the repository, which reduces the time in half for the `push_to_hub` method.\r\n- Collaboration, as well as the system of branches\/merges\/rebases is IMO less straightforward than for models and spaces. In the situation where such collaboration is needed, I would *heavily* advocate for the `Repository` helper of the `huggingface_hub` to be used instead of the `push_to_hub` method which will always be, by design, limiting in that regard (even if based on a git-workflow instead of HTTP requests)\r\n\r\nIn order to overcome the limit of 5GB files set by the HTTP requests, dataset sharding is used.\r\n\r\n### Testing\r\n\r\nThe test suite implemented here makes use of the moon-staging instead of the production setup. As several repositories are created and deleted, it is better to use the staging.\r\n\r\nIt does not require setting an environment variable or any kind of special attention but introduces a new decorator `with_staging_testing` which patches global variables to use the staging endpoint instead of the production endpoint.\r\n\r\n### Examples\r\n\r\nThe tests cover a lot of examples and behavior.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3098\/reactions","total_count":4,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":3,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3098\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3097","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3097\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3097\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3097\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3097","id":1027750811,"node_id":"I_kwDODunzps49Qjub","number":3097,"title":"`ModuleNotFoundError: No module named 
'fsspec.exceptions'`","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-10-15T19:34:38Z","updated_at":"2021-10-18T07:51:54Z","closed_at":"2021-10-18T07:51:54Z","author_as
sociation":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI keep runnig into a fsspec ModuleNotFound error\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n>>> from datasets import get_dataset_infos\r\n2021-10-15 15:25:37.863206: W tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory\r\n2021-10-15 15:25:37.863252: I tensorflow\/stream_executor\/cuda\/cudart_stub.cc:29] Ignore above cudart dlerror if you do not have a GPU set up on your machine.\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/__init__.py\", line 37, in \r\n from .builder import ArrowBasedBuilder, BeamBasedBuilder, BuilderConfig, DatasetBuilder, GeneratorBasedBuilder\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 56, in \r\n from .utils.streaming_download_manager import StreamingDownloadManager\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/utils\/streaming_download_manager.py\", line 11, in \r\n from fsspec.exceptions import FSTimeoutError\r\nModuleNotFoundError: No module named 'fsspec.exceptions'\r\n```\r\n\r\nYet, I do have `fsspec`:\r\n```bash\r\nhf@victor-scale:~\/dev\/promptsource$ pip show fsspec\r\nName: fsspec\r\nVersion: 2021.5.0\r\nSummary: File-system specification\r\nHome-page: http:\/\/github.com\/intake\/filesystem_spec\r\nAuthor: None\r\nAuthor-email: None\r\nLicense: BSD\r\nLocation: \/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\r\nRequires: \r\nRequired-by: datasets\r\n```\r\n\r\nWith the same version of fsspec and `datasets==1.9.0`, I don't see this problem....\r\n\r\n## Environment info\r\n\r\nI can't even run `datasets-cli env` actually.., but here's my env:\r\n- `datasets` version: 1.13.3\r\n- Platform: Ubuntu 18.04\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3097\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3097\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3096","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3096\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3096\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3096\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3096","id":1027535685,"node_id":"PR_kwDODunzps4tQblQ","number":3096,"title":"Fix Audio feature mp3 
resampling","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-15T15:05:19Z","updated_at":"2021-10-15T15:38:30Z","closed_at":"2021-10-15T15:38:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3096","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3096","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3096.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3096.patch","merged_at":"2021-10-15T15:38:29Z"},"body":"Issue #3095 is related to mp3 resampling, not to `cast_column`.\r\n\r\nThis PR fixes Audio feature mp3 resampling.\r\n\r\nFix #3095.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3096\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3096\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3095","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3095\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3095\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3095\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3095","id":1027453146,"node_id":"I_kwDODunzps49PbDa","number":3095,"title":"`cast_column` makes audio decoding 
fail","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-10-15T13:36:58Z","updated_at":"2021-10-15T
15:38:30Z","closed_at":"2021-10-15T15:38:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nAfter changing the sampling rate automatic decoding fails.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nimport datasets\r\n\r\nds = load_dataset(\"common_voice\", \"ab\", split=\"train\")\r\n\r\nds = ds.cast_column(\"audio\", datasets.features.Audio(sampling_rate=16_000))\r\n\r\nprint(ds[0][\"audio\"]) # <- this fails currently\r\n```\r\n\r\nyields:\r\n\r\n```\r\nTypeError: forward() takes 2 positional arguments but 4 were given\r\n```\r\n\r\n## Expected results\r\nno failure\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\nCopy-and-paste the text below in your GitHub issue.\r\n\r\n- `datasets` version: 1.13.2 (master)\r\n- Platform: Linux-5.11.0-1019-aws-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3095\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3095\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3094","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3094\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3094\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3094\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3094","id":1027328633,"node_id":"I_kwDODunzps49O8p5","number":3094,"title":"Support loading a dataset from SQLite files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-15T10:58:41Z","updated_at":"2021-10-15T10:58:41Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As 
requested by @julien-c, we could eventually support loading a dataset from SQLite files, like it is the case for JSON\/CSV files.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3094\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3094\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3093","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3093\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3093\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3093\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3093","id":1027262124,"node_id":"I_kwDODunzps49Osas","number":3093,"title":"Error loading json dataset with multiple splits if keys in nested dicts have a different order","user":{"login":"dthulke","id":8331189,"node_id":"MDQ6VXNlcjgzMzExODk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8331189?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dthulke","html_url":"https:\/\/github.com\/dthulke","followers_url":"https:\/\/api.github.com\/users\/dthulke\/followers","following_url":"https:\/\/api.github.com\/users\/dthulke\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dthulke\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dthulke\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dthulke\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dthulke\/orgs","repos_url":"https:\/\/api.github.com\/users\/dthulke\/repos","events_url":"https:\/\/api.github.com\/users\/dthulke\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dthulke\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-15T09:33:25Z","updated_at":"2021-11-10T13:20:34Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nLoading a json dataset with multiple splits that have nested dicts with keys in different order results in the error below.\r\n\r\nIf the keys in the nested dicts always have the same order or even if you just load a single split in which the nested dicts don't have the same order, everything works fine.\r\n\r\n## Steps to reproduce the bug\r\nCreate two json files:\r\n\r\ntrain.json\r\n```\r\n{\"a\": {\"c\": 8, \"b\": 5}}\r\n{\"a\": {\"b\": 7, \"c\": 6}}\r\n```\r\n\r\ntest.json\r\n```\r\n{\"a\": {\"b\": 1, \"c\": 2}}\r\n{\"a\": {\"b\": 3, \"c\": 4}}\r\n```\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n# Loading the files individually works (even though the keys in train.json don't have the same order)\r\nload_dataset('json', data_files={\"test\": \"test.json\"})\r\nload_dataset('json', data_files={\"train\": \"train.json\"})\r\n# Loading both splits fails\r\nload_dataset('json', data_files={\"train\": 
\"train.json\", \"test\": \"test.json\"})\r\n```\r\n\r\n## Expected results\r\nLoading both splits should not give an error whether the nested dicts are have the same order or not.\r\n\r\n## Actual results\r\n```\r\n>>> load_dataset('json', data_files={\"train\": \"train.json\", \"test\": \"test.json\"})\r\nUsing custom data configuration default-f1bc76fd07398c4c\r\nDownloading and preparing dataset json\/default to \/home\/dthulke\/.cache\/huggingface\/datasets\/json\/default-f1bc76fd07398c4c\/0.0.0\/c2d554c3377ea79c7664b93dc65d0803b45e3279000f993c7bfd18937fd7f426...\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2\/2 [00:00<00:00, 
8839.42it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2\/2 [00:00<00:00, 477.82it\/s]\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/dthulke\/venvs\/venv_torch_transformers\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 1632, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/dthulke\/venvs\/venv_torch_transformers\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 608, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/dthulke\/venvs\/venv_torch_transformers\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 697, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/dthulke\/venvs\/venv_torch_transformers\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 1159, in _prepare_split\r\n writer.write_table(table)\r\n File \"\/home\/dthulke\/venvs\/venv_torch_transformers\/lib\/python3.6\/site-packages\/datasets\/arrow_writer.py\", line 428, in write_table\r\n pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n File \"pyarrow\/table.pxi\", line 1596, in pyarrow.lib.Table.from_arrays\r\n File \"pyarrow\/table.pxi\", line 592, in pyarrow.lib._sanitize_arrays\r\n File \"pyarrow\/array.pxi\", line 329, in pyarrow.lib.asarray\r\n File \"pyarrow\/table.pxi\", line 277, in pyarrow.lib.ChunkedArray.cast\r\n File \"\/home\/dthulke\/venvs\/venv_torch_transformers\/lib\/python3.6\/site-packages\/pyarrow\/compute.py\", line 297, in cast\r\n return call_function(\"cast\", [arr], options)\r\n File \"pyarrow\/_compute.pyx\", line 527, in pyarrow._compute.call_function\r\n File \"pyarrow\/_compute.pyx\", line 337, in pyarrow._compute.Function.call\r\n File \"pyarrow\/error.pxi\", line 143, in 
pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 120, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowNotImplementedError: Unsupported cast from struct to struct using function cast_struct\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.13.2\r\n- Platform: Linux-4.15.0-147-generic-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.6.9\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3093\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3093\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3092","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3092\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3092\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3092\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3092","id":1027260383,"node_id":"PR_kwDODunzps4tPj6e","number":3092,"title":"Fix JNLBA dataset","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-15T09:31:14Z","updated_at":"2021-10-22T08:23:57Z","closed_at":"2021-10-22T08:23:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3092","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3092","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3092.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3092.patch","merged_at":"2021-10-22T08:23:57Z"},"body":"As mentioned in #3089, I've added more tags and also updated the link for dataset which was earlier using a Google Drive link. \r\nI'm having problem with generating dummy data as `datasets-cli dummy_data .\/datasets\/jnlpba --auto_generate --match_text_files \"*.iob2\"` is giving `datasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\n` error. 
I'll try to add dummy data manually.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3092\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3092\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3091","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3091\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3091\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3091\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3091","id":1027251530,"node_id":"I_kwDODunzps49Op1K","number":3091,"title":"`blog_authorship_corpus` is broken","user":{"login":"fdtomasi","id":12514317,"node_id":"MDQ6VXNlcjEyNTE0MzE3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12514317?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/fdtomasi","html_url":"https:\/\/github.com\/fdtomasi","followers_url":"https:\/\/api.github.com\/users\/fdtomasi\/followers","following_url":"https:\/\/api.github.com\/users\/fdtomasi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/fdtomasi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/fdtomasi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/fdtomasi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/fdtomasi\/orgs","repos_url":"https:\/\/api.github.com\/users\/fdtomasi\/repos","events_url":"https:\/\/api.github.com\/users\/fdtomasi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/fdtomasi\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-10-15T09:20:40Z","updated_at":"2021-10-19T13:06:10Z","closed_at":"2021-10-19T12:50:39Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe dataset `blog_authorship_corpus` is broken.\r\nBy bypassing the checksum checks, the loading does not return any error but the resulting dataset is empty.\r\nI suspect it is because the data download url is broken (http:\/\/www.cs.biu.ac.il\/~koppel\/blogs\/blogs.zip).\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nds = load_dataset(\"blog_authorship_corpus\", split=\"train\", download_mode='force_redownload')\r\n```\r\n\r\n## Expected results\r\nNo error.\r\n\r\n## Actual results\r\n```\r\n---------------------------------------------------------------------------\r\nNonMatchingChecksumError Traceback (most recent call last)\r\n\/tmp\/ipykernel_5237\/1729238701.py in \r\n 2 ds = load_dataset(\r\n 3 \"blog_authorship_corpus\", split=\"train\",\r\n----> 4 download_mode='force_redownload'\r\n 5 )\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, task, 
streaming, **config_kwargs)\r\n 1115 ignore_verifications=ignore_verifications,\r\n 1116 try_from_hf_gcs=try_from_hf_gcs,\r\n-> 1117 use_auth_token=use_auth_token,\r\n 1118 )\r\n 1119 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n 635 if not downloaded_from_gcs:\r\n 636 self._download_and_prepare(\r\n--> 637 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 638 )\r\n 639 # Sync info\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 707 if verify_infos:\r\n 708 verify_checksums(\r\n--> 709 self.info.download_checksums, dl_manager.get_recorded_sizes_checksums(), \"dataset source files\"\r\n 710 )\r\n 711 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 38 if len(bad_urls) > 0:\r\n 39 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 40 raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 41 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 42 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['http:\/\/www.cs.biu.ac.il\/~koppel\/blogs\/blogs.zip']\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.13.2\r\n- Platform: Linux-4.19.0-18-cloud-amd64-x86_64-with-debian-10.11\r\n- Python version: 3.7.10\r\n- PyArrow version: 5.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3091\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3091\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3090","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3090\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3090\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3090\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3090","id":1027100371,"node_id":"PR_kwDODunzps4tPEtH","number":3090,"title":"Update BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-15T05:39:27Z","updated_at":"2021-10-15T07:35:57Z","closed_at":"2021-10-15T07:35:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3090","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3090","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3090.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3090.patch","merged_at":"2021-10-15T07:35:57Z"},"body":"Update BibTeX entry.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3090\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3090\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3089","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3089\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3089\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3089\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3089","id":1026973360,"node_id":"I_kwDODunzps49Nl6w","number":3089,"title":"JNLPBA 
Dataset","user":{"login":"sciarrilli","id":10460111,"node_id":"MDQ6VXNlcjEwNDYwMTEx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10460111?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sciarrilli","html_url":"https:\/\/github.com\/sciarrilli","followers_url":"https:\/\/api.github.com\/users\/sciarrilli\/followers","following_url":"https:\/\/api.github.com\/users\/sciarrilli\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sciarrilli\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sciarrilli\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sciarrilli\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sciarrilli\/orgs","repos_url":"https:\/\/api.github.com\/users\/sciarrilli\/repos","events_url":"https:\/\/api.github.com\/users\/sciarrilli\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sciarrilli\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-15T01:16:02Z","updated_at":"2021-10-22T08:23:57Z","closed_at":"2021-10-22T08:23:57Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\n```\r\n\r\n## Expected results\r\nThe dataset loading script for this dataset is incorrect. This is a biomedical dataset used for named entity recognition. The entities in the [script](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/jnlpba\/jnlpba.py#L81-L83) are: O, B, and I. 
The correct entities from the original data file are: \r\n\r\n['O',\r\n 'B-DNA',\r\n 'I-DNA',\r\n 'B-RNA',\r\n 'I-RNA',\r\n 'B-cell_line',\r\n 'I-cell_line',\r\n 'B-cell_type',\r\n 'I-cell_type',\r\n 'B-protein',\r\n 'I-protein']\r\n\r\n## Actual results\r\nThe dataset loader script needs to include the following NER names:\r\n\r\n['O',\r\n 'B-DNA',\r\n 'I-DNA',\r\n 'B-RNA',\r\n 'I-RNA',\r\n 'B-cell_line',\r\n 'I-cell_line',\r\n 'B-cell_type',\r\n 'I-cell_type',\r\n 'B-protein',\r\n 'I-protein']\r\n\r\nAnd the [data](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/jnlpba\/jnlpba.py#L46) that is being pulled has been modified from the original dataset and does not include the original NER tags.\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform:\r\n- Python version:\r\n- PyArrow version:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3089\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3089\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3088","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3088\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3088\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3088\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3088","id":1026920369,"node_id":"PR_kwDODunzps4tOhRx","number":3088,"title":"Use template column_mapping to transmit_format instead of template features","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-14T23:49:40Z","updated_at":"2021-10-15T14:40:05Z","closed_at":"2021-10-15T10:11:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3088","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3088","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3088.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3088.patch","merged_at":"2021-10-15T10:11:04Z"},"body":"Use `template.column_mapping` to check for modified columns since `template.features` 
represent a generic template\/column mapping. \r\n\r\nFix #3087 \r\n\r\n\r\nTODO:\r\n- [x] Add a test","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3088\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3088\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3087","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3087\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3087\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3087\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3087","id":1026780469,"node_id":"I_kwDODunzps49M201","number":3087,"title":"Removing label column in a text classification dataset yields to errors","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-14T20:12:50Z","updated_at":"2021-10-15T10:11:04Z","closed_at":"2021-10-15T10:11:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nThis looks like #3059 but it's not linked to the cache this time. 
Removing the `label` column from a text classification dataset and then performing any processing will result in an error.\r\n\r\nTo reproduce:\r\n```py\r\nfrom datasets import load_dataset\r\nfrom transformers import AutoTokenizer\r\n\r\nraw_datasets = load_dataset(\"imdb\")\r\nraw_datasets = raw_datasets.remove_columns(\"label\")\r\n\r\nmodel_checkpoint = \"distilbert-base-cased\"\r\ntokenizer = AutoTokenizer.from_pretrained(model_checkpoint)\r\ncontext_length = 128\r\n\r\ndef tokenize_pad_and_truncate(texts):\r\n return tokenizer(texts[\"text\"], truncation=True, padding=\"max_length\", max_length=context_length)\r\n\r\ntokenized_datasets = raw_datasets.map(tokenize_pad_and_truncate, batched=True)\r\n```\r\n\r\n\r\nTraceback:\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n in \r\n 12 return tokenizer(texts[\"text\"], truncation=True, padding=\"max_length\", max_length=context_length)\r\n 13 \r\n---> 14 tokenized_datasets = raw_datasets.map(tokenize_pad_and_truncate, batched=True)\r\n\r\n~\/git\/datasets\/src\/datasets\/dataset_dict.py in map(self, function, with_indices, input_columns, batched, batch_size, remove_columns, keep_in_memory, load_from_cache_file, cache_file_names, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, desc)\r\n 500 desc=desc,\r\n 501 )\r\n--> 502 for k, dataset in self.items()\r\n 503 }\r\n 504 )\r\n\r\n~\/git\/datasets\/src\/datasets\/dataset_dict.py in (.0)\r\n 500 desc=desc,\r\n 501 )\r\n--> 502 for k, dataset in self.items()\r\n 503 }\r\n 504 )\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint, desc)\r\n 2051 new_fingerprint=new_fingerprint,\r\n 2052 disable_tqdm=disable_tqdm,\r\n-> 2053 desc=desc,\r\n 2054 )\r\n 2055 else:\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 501 self: \"Dataset\" = kwargs.pop(\"self\")\r\n 502 # apply actual function\r\n--> 503 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 504 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 505 for dataset in datasets:\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 468 }\r\n 469 # apply actual function\r\n--> 470 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 471 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 472 # re-apply format to the output\r\n\r\n~\/git\/datasets\/src\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 404 # Call actual function\r\n 405 \r\n--> 406 out = func(self, *args, **kwargs)\r\n 407 \r\n 408 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, disable_tqdm, desc, cache_only)\r\n 2243 if os.path.exists(cache_file_name) and load_from_cache_file:\r\n 2244 logger.warning(\"Loading cached processed dataset at %s\", 
cache_file_name)\r\n-> 2245 info = self.info.copy()\r\n 2246 info.features = features\r\n 2247 info.task_templates = None\r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in copy(self)\r\n 278 \r\n 279 def copy(self) -> \"DatasetInfo\":\r\n--> 280 return self.__class__(**{k: copy.deepcopy(v) for k, v in self.__dict__.items()})\r\n 281 \r\n 282 \r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in __init__(self, description, citation, homepage, license, features, post_processed, supervised_keys, task_templates, builder_name, config_name, version, splits, download_checksums, download_size, post_processing_size, dataset_size, size_in_bytes)\r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in __post_init__(self)\r\n 177 for idx, template in enumerate(self.task_templates):\r\n 178 if isinstance(template, TextClassification):\r\n--> 179 labels = self.features[template.label_column].names\r\n 180 self.task_templates[idx] = TextClassification(\r\n 181 text_column=template.text_column, label_column=template.label_column, labels=labels\r\n\r\nKeyError: 'label'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3087\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3087\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3086","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3086\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3086\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3086\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3086","id":1026481905,"node_id":"PR_kwDODunzps4tNIvp","number":3086,"title":"Remove _resampler from Audio 
fields","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-14T14:38:50Z","updated_at":"2021-10-14T15:13:41Z","closed_at":"2021-10-14T15:13:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3086","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3086","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3086.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3086.patch","merged_at":"2021-10-14T15:13:40Z"},"body":"The `_resampler` Audio attribute was implemented to optimize audio resampling, but it should not be cached.\r\n\r\nThis PR removes `_resampler` from Audio fields, so that it is not returned by `fields()` or `asdict()`.\r\n\r\nFix #3083.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3086\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3086\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3085","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3085\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3085\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3085\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3085","id":1026467384,"node_id":"PR_kwDODunzps4tNFza","number":3085,"title":"Fixes to 
`to_tf_dataset`","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-14T14:25:56Z","updated_at":"2021-10-21T15:05:29Z","closed_at":"2021-10-21T15:05:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3085","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3085","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3085.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3085.patch","merged_at":"2021-10-21T15:05:28Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3085\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3085\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3084","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3084\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3084\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3084\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3084","id":1026428992,"node_id":"I_kwDODunzps49LhBA","number":3084,"title":"VisibleDeprecationWarning when using 
`set_format(\"numpy\")`","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"assignees":[{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"
","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-10-14T13:53:01Z","updated_at":"2021-10-22T16:04:14Z","closed_at":"2021-10-22T16:04:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Code to reproduce:\r\n\r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"glue\", \"mnli\")\r\n\r\nfrom transformers import AutoTokenizer\r\ntokenizer = AutoTokenizer.from_pretrained('distilbert-base-cased')\r\n\r\ndef tokenize_function(dataset):\r\n return tokenizer(dataset['premise'])\r\n\r\ntokenized_datasets = dataset.map(tokenize_function, batched=True, remove_columns=dataset['train'].features)\r\n\r\ntokenized_datasets.set_format(\"numpy\")\r\n\r\ntokenized_datasets['train'][5:8]\r\n```\r\nOutputs:\r\n\r\n```\r\npython3.9\/site-packages\/datasets\/formatting\/formatting.py:167: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. 
If you meant to do this, you must specify 'dtype=object' when creating the ndarray\r\n return np.array(array, copy=False, **self.np_array_kwargs)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3084\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3084\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3083","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3083\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3083\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3083\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3083","id":1026397062,"node_id":"I_kwDODunzps49LZOG","number":3083,"title":"Datasets with Audio feature raise error when loaded from cache due to _resampler parameter","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-14T13:23:53Z","updated_at":"2021-10-14T15:13:40Z","closed_at":"2021-10-14T15:13:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAs reported by @patrickvonplaten, when loaded from the cache, datasets containing the Audio feature raise TypeError.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n# load first time works\r\nds = load_dataset(\"patrickvonplaten\/librispeech_asr_dummy\", \"clean\") \r\n\r\n# load from cache breaks\r\nds = load_dataset(\"patrickvonplaten\/librispeech_asr_dummy\", \"clean\") \r\n```\r\n\r\n## Actual results\r\n```\r\nTypeError: __init__() got an unexpected keyword argument '_resampler'\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3083\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3083\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3082","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3082\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3082\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3082\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3082","id":1026388994,"node_id":"PR_kwDODunzps4tM2BV","number":3082,"title":"Fix error related to huggingface_hub timeout parameter","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-14T13:17:47Z","updated_at":"2021-10-14T14:39:52Z","closed_at":"2021-10-14T14:39:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3082","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3082","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3082.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3082.patch","merged_at":"2021-10-14T14:39:51Z"},"body":"The `huggingface_hub` package added the parameter `timeout` from version 0.0.19.\r\n\r\nThis PR bumps this minimal version.\r\n\r\nFix #3080.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3082\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3082\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3081","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3081\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3081\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3081\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3081","id":1026383749,"node_id":"PR_kwDODunzps4tM1Gy","number":3081,"title":"[Audio datasets] Adapting all audio 
datasets","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-14T13:13:45Z","updated_at":"2021-10-15T12:52:03Z","closed_at":"2021-10-15T12:22:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3081","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3081","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3081.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3081.patch","merged_at":"2021-10-15T12:22:33Z"},"body":"This PR adds the new `Audio(...)` features - see: https:\/\/github.com\/huggingface\/datasets\/pull\/2324 to the most important audio datasets:\r\n\r\n- Librispeech\r\n- Timit\r\n- Common Voice\r\n- AMI\r\n- ... (others I'm forgetting now)\r\n\r\nThe PR is curently blocked because the following leads to a problem:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n# load first time works\r\nds = load_dataset(\"patrickvonplaten\/librispeech_asr_dummy\", \"clean\") \r\n\r\n# load from cache breaks\r\nds = load_dataset(\"patrickvonplaten\/librispeech_asr_dummy\", \"clean\") \r\n```\r\n\r\nAs soon as it's unblocked, I'll adapt the other audio datasets as well. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3081\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3081\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3080","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3080\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3080\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3080\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3080","id":1026380626,"node_id":"I_kwDODunzps49LVNS","number":3080,"title":"Error related to timeout keyword argument","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-14T13:10:58Z","updated_at":"2021-10-14T14:39:51Z","closed_at":"2021-10-14T14:39:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAs reported by @patrickvonplaten, a TypeError is raised when trying to load a dataset.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nds = load_dataset(\"patrickvonplaten\/librispeech_asr_dummy\", \"clean\") \r\n```\r\n\r\n## Actual results\r\n```\r\nTypeError: dataset_info() got an unexpected keyword argument 'timeout'\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3080\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3080\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3077","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3077\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3077\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3077\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3077","id":1026150362,"node_id":"PR_kwDODunzps4tMFPG","number":3077,"title":"Fix loading a metric with internal import","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-14T09:06:58Z","updated_at":"2021-10-14T09:14:56Z","closed_at":"2021-10-14T09:14:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3077","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3077","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3077.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3077.patch","merged_at":"2021-10-14T09:14:55Z"},"body":"After refactoring the module factory (#2986), a bug was introduced when loading metrics with internal imports.\r\n\r\nThis PR adds a new test case and fixes this bug.\r\n\r\nFix #3076.\r\n\r\nCC: @sgugger @merveenoyan ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3077\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3077\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3076","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3076\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3076\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3076\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3076","id":1026113484,"node_id":"I_kwDODunzps49KT_M","number":3076,"title":"Error when loading a 
metric","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-14T08:29:27Z","updated_at":"2021-10-14T09:14:55Z","
closed_at":"2021-10-14T09:14:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAs reported by @sgugger, after last release, exception is thrown when loading a metric.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_metric\r\n\r\nmetric = load_metric(\"squad_v2\")\r\n```\r\n\r\n## Actual results\r\n```\r\nFileNotFoundError Traceback (most recent call last)\r\n in \r\n 1 from datasets import load_metric\r\n----> 2 metric = load_metric(\"squad_v2\")\r\n\r\nd:\\projects\\huggingface\\datasets\\src\\datasets\\load.py in load_metric(path, config_name, process_id, num_process, cache_dir, experiment_id, keep_in_memory, download_config, download_mode, revision, script_version, **metric_init_kwargs)\r\n 1336 )\r\n 1337 revision = script_version\r\n-> 1338 metric_module = metric_module_factory(\r\n 1339 path, revision=revision, download_config=download_config, download_mode=download_mode\r\n 1340 ).module_path\r\n\r\nd:\\projects\\huggingface\\datasets\\src\\datasets\\load.py in metric_module_factory(path, revision, download_config, download_mode, force_local_path, dynamic_modules_path, **download_kwargs)\r\n 1237 if not isinstance(e1, FileNotFoundError):\r\n 1238 raise e1 from None\r\n-> 1239 raise FileNotFoundError(\r\n 1240 f\"Couldn't find a metric script at {relative_to_absolute_path(combined_path)}. \"\r\n 1241 f\"Metric '{path}' doesn't exist on the Hugging Face Hub either.\"\r\n\r\nFileNotFoundError: Couldn't find a metric script at D:\\projects\\huggingface\\datasets\\squad_v2\\squad_v2.py. Metric 'squad_v2' doesn't exist on the Hugging Face Hub either.\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3076\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3076\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3075","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3075\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3075\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3075\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3075","id":1026103388,"node_id":"PR_kwDODunzps4tL75E","number":3075,"title":"Updates LexGLUE and MultiEURLEX README.md 
files","user":{"login":"iliaschalkidis","id":1626984,"node_id":"MDQ6VXNlcjE2MjY5ODQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1626984?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iliaschalkidis","html_url":"https:\/\/github.com\/iliaschalkidis","followers_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/followers","following_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/orgs","repos_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/repos","events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-14T08:19:16Z","updated_at":"2021-10-18T10:13:40Z","closed_at":"2021-10-18T10:13:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3075","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3075","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3075.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3075.patch","merged_at":"2021-10-18T10:13:40Z"},"body":"Updates LexGLUE and MultiEURLEX README.md files\r\n\r\n- Fix leaderboard in LexGLUE.\r\n- Fix an error in the CaseHOLD data example.\r\n- Turn MultiEURLEX dataset statistics table into HTML to nicely render in HF website.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3075\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3075\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3074","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3074\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3074\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3074\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3074","id":1025940085,"node_id":"PR_kwDODunzps4tLbe-","number":3074,"title":"add XCSR 
dataset","user":{"login":"yangxqiao","id":42788901,"node_id":"MDQ6VXNlcjQyNzg4OTAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42788901?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yangxqiao","html_url":"https:\/\/github.com\/yangxqiao","followers_url":"https:\/\/api.github.com\/users\/yangxqiao\/followers","following_url":"https:\/\/api.github.com\/users\/yangxqiao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yangxqiao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yangxqiao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yangxqiao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yangxqiao\/orgs","repos_url":"https:\/\/api.github.com\/users\/yangxqiao\/repos","events_url":"https:\/\/api.github.com\/users\/yangxqiao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yangxqiao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-14T04:39:59Z","updated_at":"2021-11-08T13:52:36Z","closed_at":"2021-11-08T13:52:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3074","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3074","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3074.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3074.patch","merged_at":"2021-11-08T13:52:36Z"},"body":"Hi,\r\n\r\nI wanted to add the [XCSR ](https:\/\/inklab.usc.edu\/\/XCSR\/xcsr_datasets) dataset to huggingface! :) \r\n\r\nI followed the instructions of adding new dataset to huggingface and have all the required files ready now! It would be super helpful if you can take a look and review them. Thanks in advance for your time and help. 
Look forward to hearing from you and can't wait to add XCSR to huggingface :D ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3074\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3074\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3073","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3073\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3073\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3073\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3073","id":1025718469,"node_id":"I_kwDODunzps49IzjF","number":3073,"title":"Import error installing with ppc64le","user":{"login":"gcervantes8","id":21228908,"node_id":"MDQ6VXNlcjIxMjI4OTA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21228908?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gcervantes8","html_url":"https:\/\/github.com\/gcervantes8","followers_url":"https:\/\/api.github.com\/users\/gcervantes8\/followers","following_url":"https:\/\/api.github.com\/users\/gcervantes8\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gcervantes8\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gcervantes8\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gcervantes8\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gcervantes8\/orgs","repos_url":"https:\/\/api.github.com\/users\/gcervantes8\/repos","events_url":"https:\/\/api.github.com\/users\/gcervantes8\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gcervantes8\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-13T21:37:23Z","updated_at":"2021-10-14T16:35:46Z","closed_at":"2021-10-14T16:33:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nInstalling the datasets library with a computer running with ppc64le seems to cause an issue when importing the datasets library.\r\n\r\n\r\n```\r\npython\r\nPython 3.6.13 | packaged by conda-forge | (default, Sep 23 2021, 07:37:44) \r\n[GCC 9.4.0] on linux\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n>>> import datasets\r\nIllegal instruction (core dumped)\r\n```\r\n\r\nError when importing\r\n`Illegal instruction (core dumped)`\r\n\r\n## Steps to reproduce the bug\r\nI get this error when installing the library by using conda. 
I can't install with pip I believe because pyarrow only has the ppc64le library on conda forge\r\n```\r\nconda create --name transformers_py36_v2 python=3.6 \r\nconda activate transformers_py36_v2 \r\nconda install datasets \r\n```\r\n\r\n## Tracebacks\r\nconda create --name transformers_py36_v2 python=3.6\r\n\r\n\r\n```\r\nCollecting package metadata (current_repodata.json): done\r\nSolving environment: done\r\n\r\n\r\n==> WARNING: A newer version of conda exists. <==\r\n current version: 4.9.2\r\n latest version: 4.10.3\r\n\r\nPlease update conda by running\r\n\r\n $ conda update -n base -c defaults conda\r\n\r\n\r\n\r\n## Package Plan ##\r\n\r\n environment location: \/p\/home\/gerryc\/.conda\/envs\/transformers_py36_v2\r\n\r\n added \/ updated specs:\r\n - python=3.6\r\n\r\n\r\nThe following NEW packages will be INSTALLED:\r\n\r\n _libgcc_mutex conda-forge\/linux-ppc64le::_libgcc_mutex-0.1-conda_forge\r\n _openmp_mutex conda-forge\/linux-ppc64le::_openmp_mutex-4.5-1_gnu\r\n ca-certificates conda-forge\/linux-ppc64le::ca-certificates-2021.10.8-h1084571_0\r\n certifi pkgs\/main\/linux-ppc64le::certifi-2020.12.5-py36h6ffa863_0\r\n ld_impl_linux-ppc~ conda-forge\/linux-ppc64le::ld_impl_linux-ppc64le-2.36.1-ha35d02b_2\r\n libffi conda-forge\/linux-ppc64le::libffi-3.4.2-h3b9df90_4\r\n libgcc-ng conda-forge\/linux-ppc64le::libgcc-ng-11.2.0-h7698a5e_11\r\n libgomp conda-forge\/linux-ppc64le::libgomp-11.2.0-h7698a5e_11\r\n libstdcxx-ng conda-forge\/linux-ppc64le::libstdcxx-ng-11.2.0-habdf983_11\r\n libzlib conda-forge\/linux-ppc64le::libzlib-1.2.11-h339bb43_1013\r\n ncurses conda-forge\/linux-ppc64le::ncurses-6.2-hea85c5d_4\r\n openssl conda-forge\/linux-ppc64le::openssl-1.1.1l-h4e0d66e_0\r\n pip conda-forge\/noarch::pip-21.3-pyhd8ed1ab_0\r\n python conda-forge\/linux-ppc64le::python-3.6.13-h57873ef_2_cpython\r\n readline conda-forge\/linux-ppc64le::readline-8.1-h5c45dff_0\r\n setuptools pkgs\/main\/linux-ppc64le::setuptools-58.0.4-py36h6ffa863_0\r\n sqlite conda-forge\/linux-ppc64le::sqlite-3.36.0-h4e2196e_2\r\n tk conda-forge\/linux-ppc64le::tk-8.6.11-h41c6715_1\r\n wheel conda-forge\/noarch::wheel-0.37.0-pyhd8ed1ab_1\r\n xz conda-forge\/linux-ppc64le::xz-5.2.5-h6eb9509_1\r\n zlib conda-forge\/linux-ppc64le::zlib-1.2.11-h339bb43_1013\r\n\r\n\r\nProceed ([y]\/n)? y\r\n\r\nPreparing transaction: done\r\nVerifying transaction: done\r\nExecuting transaction: done\r\n#\r\n# To activate this environment, use\r\n#\r\n# $ conda activate transformers_py36_v2\r\n#\r\n# To deactivate an active environment, use\r\n#\r\n# $ conda deactivate\r\n```\r\n\r\n\r\nconda activate transformers_py36_v2\r\nconda install datasets\r\n```\r\nCollecting package metadata (current_repodata.json): done\r\nSolving environment: failed with initial frozen solve. Retrying with flexible solve.\r\nSolving environment: failed with repodata from current_repodata.json, will retry with next repodata source.\r\nCollecting package metadata (repodata.json): done\r\nSolving environment: done\r\n\r\n\r\n==> WARNING: A newer version of conda exists. 
<==\r\n current version: 4.9.2\r\n latest version: 4.10.3\r\n\r\nPlease update conda by running\r\n\r\n $ conda update -n base -c defaults conda\r\n\r\n\r\n\r\n## Package Plan ##\r\n\r\n environment location: \/p\/home\/gerryc\/.conda\/envs\/transformers_py36_v2\r\n\r\n added \/ updated specs:\r\n - datasets\r\n\r\n\r\nThe following NEW packages will be INSTALLED:\r\n\r\n abseil-cpp conda-forge\/linux-ppc64le::abseil-cpp-20210324.2-h3b9df90_0\r\n aiohttp conda-forge\/linux-ppc64le::aiohttp-3.7.4.post0-py36hc33305d_0\r\n arrow-cpp conda-forge\/linux-ppc64le::arrow-cpp-5.0.0-py36hf9cf308_8_cpu\r\n async-timeout conda-forge\/noarch::async-timeout-3.0.1-py_1000\r\n attrs conda-forge\/noarch::attrs-21.2.0-pyhd8ed1ab_0\r\n aws-c-cal conda-forge\/linux-ppc64le::aws-c-cal-0.5.11-hb3fac3d_0\r\n aws-c-common conda-forge\/linux-ppc64le::aws-c-common-0.6.2-h4e0d66e_0\r\n aws-c-event-stream conda-forge\/linux-ppc64le::aws-c-event-stream-0.2.7-h76da5f2_13\r\n aws-c-io conda-forge\/linux-ppc64le::aws-c-io-0.10.5-hf6a6c7c_0\r\n aws-checksums conda-forge\/linux-ppc64le::aws-checksums-0.1.11-hfe76d68_7\r\n aws-sdk-cpp conda-forge\/linux-ppc64le::aws-sdk-cpp-1.8.186-h90855e8_3\r\n brotlipy conda-forge\/linux-ppc64le::brotlipy-0.7.0-py36hc33305d_1001\r\n bzip2 conda-forge\/linux-ppc64le::bzip2-1.0.8-h4e0d66e_4\r\n c-ares conda-forge\/linux-ppc64le::c-ares-1.17.2-h4e0d66e_0\r\n cffi conda-forge\/linux-ppc64le::cffi-1.14.6-py36h021ab3c_1\r\n chardet conda-forge\/linux-ppc64le::chardet-4.0.0-py36h270354c_1\r\n colorama conda-forge\/noarch::colorama-0.4.4-pyh9f0ad1d_0\r\n cryptography conda-forge\/linux-ppc64le::cryptography-3.4.7-py36hc71b123_0\r\n dataclasses conda-forge\/noarch::dataclasses-0.8-pyh787bdff_2\r\n datasets conda-forge\/noarch::datasets-1.12.1-pyhd8ed1ab_1\r\n dill conda-forge\/noarch::dill-0.3.4-pyhd8ed1ab_0\r\n filelock conda-forge\/noarch::filelock-3.3.0-pyhd8ed1ab_0\r\n fsspec conda-forge\/noarch::fsspec-2021.10.0-pyhd8ed1ab_0\r\n gflags conda-forge\/linux-ppc64le::gflags-2.2.2-hb209c28_1004\r\n glog conda-forge\/linux-ppc64le::glog-0.5.0-h4040248_0\r\n grpc-cpp conda-forge\/linux-ppc64le::grpc-cpp-1.40.0-h2bf711c_2\r\n huggingface_hub conda-forge\/noarch::huggingface_hub-0.0.19-pyhd8ed1ab_0\r\n idna conda-forge\/noarch::idna-2.10-pyh9f0ad1d_0\r\n idna_ssl conda-forge\/noarch::idna_ssl-1.0.0-0\r\n importlib-metadata conda-forge\/linux-ppc64le::importlib-metadata-4.8.1-py36h270354c_0\r\n importlib_metadata conda-forge\/noarch::importlib_metadata-4.8.1-hd8ed1ab_0\r\n krb5 conda-forge\/linux-ppc64le::krb5-1.19.2-haf43566_2\r\n libblas conda-forge\/linux-ppc64le::libblas-3.9.0-11_linuxppc64le_openblas\r\n libbrotlicommon conda-forge\/linux-ppc64le::libbrotlicommon-1.0.9-h4e0d66e_5\r\n libbrotlidec conda-forge\/linux-ppc64le::libbrotlidec-1.0.9-h4e0d66e_5\r\n libbrotlienc conda-forge\/linux-ppc64le::libbrotlienc-1.0.9-h4e0d66e_5\r\n libcblas conda-forge\/linux-ppc64le::libcblas-3.9.0-11_linuxppc64le_openblas\r\n libcurl conda-forge\/linux-ppc64le::libcurl-7.79.1-he415e40_1\r\n libedit conda-forge\/linux-ppc64le::libedit-3.1.20191231-h41a240f_2\r\n libev conda-forge\/linux-ppc64le::libev-4.33-h6eb9509_1\r\n libevent conda-forge\/linux-ppc64le::libevent-2.1.10-h97db324_4\r\n libgfortran-ng conda-forge\/linux-ppc64le::libgfortran-ng-11.2.0-hfdc3801_11\r\n libgfortran5 conda-forge\/linux-ppc64le::libgfortran5-11.2.0-he58fbb4_11\r\n liblapack conda-forge\/linux-ppc64le::liblapack-3.9.0-11_linuxppc64le_openblas\r\n libnghttp2 conda-forge\/linux-ppc64le::libnghttp2-1.43.0-h42039ad_1\r\n libopenblas 
conda-forge\/linux-ppc64le::libopenblas-0.3.17-pthreads_h486567c_1\r\n libprotobuf conda-forge\/linux-ppc64le::libprotobuf-3.18.1-h690f14c_0\r\n libssh2 conda-forge\/linux-ppc64le::libssh2-1.10.0-ha5a9321_2\r\n libthrift conda-forge\/linux-ppc64le::libthrift-0.15.0-h54f692e_1\r\n libutf8proc conda-forge\/linux-ppc64le::libutf8proc-2.6.1-h4e0d66e_0\r\n lz4-c conda-forge\/linux-ppc64le::lz4-c-1.9.3-h3b9df90_1\r\n multidict conda-forge\/linux-ppc64le::multidict-5.2.0-py36hc33305d_0\r\n multiprocess conda-forge\/linux-ppc64le::multiprocess-0.70.12.2-py36hc33305d_0\r\n numpy conda-forge\/linux-ppc64le::numpy-1.19.5-py36h86665d4_1\r\n orc conda-forge\/linux-ppc64le::orc-1.7.0-hae6b4bd_0\r\n packaging conda-forge\/noarch::packaging-21.0-pyhd8ed1ab_0\r\n pandas conda-forge\/linux-ppc64le::pandas-1.1.5-py36hab1a6e6_0\r\n parquet-cpp conda-forge\/noarch::parquet-cpp-1.5.1-2\r\n pyarrow conda-forge\/linux-ppc64le::pyarrow-5.0.0-py36h7a46c7e_8_cpu\r\n pycparser conda-forge\/noarch::pycparser-2.20-pyh9f0ad1d_2\r\n pyopenssl conda-forge\/noarch::pyopenssl-21.0.0-pyhd8ed1ab_0\r\n pyparsing conda-forge\/noarch::pyparsing-2.4.7-pyh9f0ad1d_0\r\n pysocks conda-forge\/linux-ppc64le::pysocks-1.7.1-py36h270354c_3\r\n python-dateutil conda-forge\/noarch::python-dateutil-2.8.2-pyhd8ed1ab_0\r\n python-xxhash conda-forge\/linux-ppc64le::python-xxhash-2.0.2-py36hc33305d_0\r\n python_abi conda-forge\/linux-ppc64le::python_abi-3.6-2_cp36m\r\n pytz conda-forge\/noarch::pytz-2021.3-pyhd8ed1ab_0\r\n pyyaml conda-forge\/linux-ppc64le::pyyaml-5.4.1-py36hc33305d_1\r\n re2 conda-forge\/linux-ppc64le::re2-2021.09.01-h3b9df90_0\r\n requests conda-forge\/noarch::requests-2.25.1-pyhd3deb0d_0\r\n s2n conda-forge\/linux-ppc64le::s2n-1.0.10-h97db324_0\r\n six conda-forge\/noarch::six-1.16.0-pyh6c4a22f_0\r\n snappy conda-forge\/linux-ppc64le::snappy-1.1.8-hb209c28_3\r\n tqdm conda-forge\/noarch::tqdm-4.62.3-pyhd8ed1ab_0\r\n typing-extensions conda-forge\/noarch::typing-extensions-3.10.0.2-hd8ed1ab_0\r\n typing_extensions conda-forge\/noarch::typing_extensions-3.10.0.2-pyha770c72_0\r\n urllib3 conda-forge\/noarch::urllib3-1.26.7-pyhd8ed1ab_0\r\n xxhash conda-forge\/linux-ppc64le::xxhash-0.8.0-h4e0d66e_3\r\n yaml conda-forge\/linux-ppc64le::yaml-0.2.5-h6eb9509_0\r\n yarl conda-forge\/linux-ppc64le::yarl-1.6.3-py36hc33305d_2\r\n zipp conda-forge\/noarch::zipp-3.6.0-pyhd8ed1ab_0\r\n zstd conda-forge\/linux-ppc64le::zstd-1.5.0-h65c4b1a_0\r\n\r\nThe following packages will be UPDATED:\r\n\r\n certifi pkgs\/main::certifi-2020.12.5-py36h6ff~ --> conda-forge::certifi-2021.5.30-py36h270354c_0\r\n\r\n\r\nProceed ([y]\/n)? y\r\n\r\nPreparing transaction: done\r\nVerifying transaction: done\r\nExecuting transaction: done\r\n```\r\n\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Red Hat Enterprise Linux 8.2 (Ootpa)\r\n- Python version: 3.6\r\n- PyArrow version: pyarrow - 5.0.0 - py36h7a46c7e_8_cpu - conda-forge\r\n\r\n\r\nAny help would be appreciated! 
I've been struggling on installing datasets on this machine.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3073\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3073\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3072","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3072\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3072\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3072\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3072","id":1025233152,"node_id":"PR_kwDODunzps4tJNnD","number":3072,"title":"Fix pathlib patches for streaming","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-13T13:11:15Z","updated_at":"2021-10-13T13:31:05Z","closed_at":"2021-10-13T13:31:05Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3072","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3072","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3072.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3072.patch","merged_at":"2021-10-13T13:31:05Z"},"body":"Fix issue https:\/\/github.com\/huggingface\/datasets\/issues\/2866 (for good this time)\r\n\r\n`counter` now works in both streaming and non-streaming mode.\r\nAnd the `AttributeError: 'str' object has no attribute 'as_posix'` related to the patch of Path.open is fixed as well\r\n\r\nNote : the patches should only affect the datasets module, not the user's ones ! 
That's why we should probably use something else than patch.object to patch the Path class' methods.\r\n\r\ncc @severo @albertvillanova ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3072\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3072\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3071","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3071\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3071\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3071\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3071","id":1024893493,"node_id":"I_kwDODunzps49FqI1","number":3071,"title":"Custom plain text dataset, plain json dataset and plain csv dataset are remove from datasets template folder","user":{"login":"zixiliuUSC","id":49173327,"node_id":"MDQ6VXNlcjQ5MTczMzI3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49173327?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zixiliuUSC","html_url":"https:\/\/github.com\/zixiliuUSC","followers_url":"https:\/\/api.github.com\/users\/zixiliuUSC\/followers","following_url":"https:\/\/api.github.com\/users\/zixiliuUSC\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zixiliuUSC\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zixiliuUSC\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zixiliuUSC\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zixiliuUSC\/orgs","repos_url":"https:\/\/api.github.com\/users\/zixiliuUSC\/repos","events_url":"https:\/\/api.github.com\/users\/zixiliuUSC\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zixiliuUSC\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-10-13T07:32:10Z","updated_at":"2021-10-13T08:27:04Z","closed_at":"2021-10-13T08:27:03Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** text, json, csv\r\n- **Description:** I am developing a customized dataset loading script. The problem is mainly about my custom dataset is seperate into many files and I only find a dataset loading template in [https:\/\/github.com\/huggingface\/datasets\/blob\/1.2.1\/datasets\/json\/json.py](https:\/\/github.com\/huggingface\/datasets\/blob\/1.2.1\/datasets\/json\/json.py) that can handle my circumstance. I'm afraid these templates are too old to use. 
Could you re-add these three templates to current master branch?\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3071\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3071\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3070","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3070\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3070\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3070\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3070","id":1024856745,"node_id":"PR_kwDODunzps4tIBRp","number":3070,"title":"Fix Windows CI with FileNotFoundError when stting up s3_base fixture","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-13T06:49:01Z","updated_at":"2021-10-13T08:55:13Z","closed_at":"2021-10-13T06:49:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3070","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3070","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3070.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3070.patch","merged_at":"2021-10-13T06:49:48Z"},"body":"Fix #3069.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3070\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3070\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3069","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3069\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3069\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3069\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3069","id":1024818680,"node_id":"I_kwDODunzps49FX34","number":3069,"title":"CI fails on Windows with FileNotFoundError when stting up s3_base fixture","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-13T05:52:26Z","updated_at":"2021-10-13T08:05:49Z","closed_at":"2021-10-13T06:49:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAfter commit 9353fc863d0c99ab0427f83cc5a4f04fcf52f1df, the CI fails on Windows with FileNotFoundError when stting up s3_base fixture. 
See: https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8151\/workflows\/5db8d154-badd-4d3d-b202-ca7a318997a2\/jobs\/50321\r\n\r\nError summary:\r\n```\r\nERROR tests\/test_arrow_dataset.py::test_dummy_dataset_serialize_s3 - FileNotF...\r\nERROR tests\/test_dataset_dict.py::test_dummy_dataset_serialize_s3 - FileNotFo...\r\n```\r\n\r\nStack trace:\r\n```\r\n______________ ERROR at setup of test_dummy_dataset_serialize_s3 ______________\r\n[gw0] win32 -- Python 3.6.8 C:\\tools\\miniconda3\\python.exe\r\n\r\n @pytest.fixture()\r\n def s3_base():\r\n # writable local S3 system\r\n import shlex\r\n import subprocess\r\n \r\n # Mocked AWS Credentials for moto.\r\n old_environ = os.environ.copy()\r\n os.environ.update(S3_FAKE_ENV_VARS)\r\n \r\n> proc = subprocess.Popen(shlex.split(\"moto_server s3 -p %s\" % s3_port))\r\n\r\ntests\\s3_fixtures.py:32: \r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\r\nC:\\tools\\miniconda3\\lib\\subprocess.py:729: in __init__\r\n restore_signals, start_new_session)\r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\r\n\r\nself = \r\nargs = 'moto_server s3 -p 5555', executable = None, preexec_fn = None\r\nclose_fds = True, pass_fds = (), cwd = None, env = None\r\nstartupinfo = \r\ncreationflags = 0, shell = False, p2cread = -1, p2cwrite = -1, c2pread = -1\r\nc2pwrite = -1, errread = -1, errwrite = -1, unused_restore_signals = True\r\nunused_start_new_session = False\r\n\r\n def _execute_child(self, args, executable, preexec_fn, close_fds,\r\n pass_fds, cwd, env,\r\n startupinfo, creationflags, shell,\r\n p2cread, p2cwrite,\r\n c2pread, c2pwrite,\r\n errread, errwrite,\r\n unused_restore_signals, unused_start_new_session):\r\n \"\"\"Execute program (MS Windows version)\"\"\"\r\n \r\n assert not pass_fds, \"pass_fds not supported on Windows.\"\r\n \r\n if not isinstance(args, str):\r\n args = list2cmdline(args)\r\n \r\n # Process startup details\r\n if startupinfo is None:\r\n startupinfo = STARTUPINFO()\r\n if -1 not in (p2cread, c2pwrite, errwrite):\r\n startupinfo.dwFlags |= _winapi.STARTF_USESTDHANDLES\r\n startupinfo.hStdInput = p2cread\r\n startupinfo.hStdOutput = c2pwrite\r\n startupinfo.hStdError = errwrite\r\n \r\n if shell:\r\n startupinfo.dwFlags |= _winapi.STARTF_USESHOWWINDOW\r\n startupinfo.wShowWindow = _winapi.SW_HIDE\r\n comspec = os.environ.get(\"COMSPEC\", \"cmd.exe\")\r\n args = '{} \/c \"{}\"'.format (comspec, args)\r\n \r\n # Start the process\r\n try:\r\n hp, ht, pid, tid = _winapi.CreateProcess(executable, args,\r\n # no special security\r\n None, None,\r\n int(not close_fds),\r\n creationflags,\r\n env,\r\n os.fspath(cwd) if cwd is not None else None,\r\n> startupinfo)\r\nE FileNotFoundError: [WinError 2] The system cannot find the file specified\r\n\r\nC:\\tools\\miniconda3\\lib\\subprocess.py:1017: FileNotFoundError\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3069\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3069\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3068","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3068\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3068\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3068\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3068","id":1024681264,"node_id":"PR_kwDODunzps4tHhOC","number":3068,"title":"feat: increase streaming retry config","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-13T02:00:50Z","updated_at":"2021-10-13T09:25:56Z","closed_at":"2021-10-13T09:25:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3068","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3068","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3068.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3068.patch","merged_at":"2021-10-13T09:25:54Z"},"body":"Increase streaming config parameters:\r\n* retry interval set to 5 seconds\r\n* max retries set to 20 (so 1mn 40s)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3068\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3068\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3067","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3067\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3067\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3067\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3067","id":1024023185,"node_id":"PR_kwDODunzps4tFSCy","number":3067,"title":"add 
story_cloze","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-12T16:36:53Z","updated_at":"2021-10-13T13:48:13Z","closed_at":"2021-10-13T13:48:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3067","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3067","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3067.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3067.patch","merged_at":"2021-10-13T13:48:13Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3067\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3067\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3066","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3066\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3066\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3066\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3066","id":1024005311,"node_id":"PR_kwDODunzps4tFObl","number":3066,"title":"Add 
iter_archive","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-12T16:17:16Z","updated_at":"2021-10-18T09:12:47Z","closed_at":"2021-10-18T09:12:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3066","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3066","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3066.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3066.patch","merged_at":"2021-10-18T09:12:46Z"},"body":"Added the `iter_archive` method for the StreamingDownloadManager.\r\n\r\nIt was already implemented in the regular DownloadManager.\r\nNow it can be used to stream from TAR archives as mentioned in https:\/\/github.com\/huggingface\/datasets\/issues\/2829\r\n\r\nI also updated the `food101` dataset as an example.\r\nAny image\/audio dataset using TAR archives can be updated to use `iter_archive` in order to be streamable :)\r\n\r\ncc @severo ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3066\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3066\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3065","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3065\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3065\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3065\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3065","id":1023951322,"node_id":"PR_kwDODunzps4tFDjk","number":3065,"title":"Fix test command after 
refac","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-12T15:23:30Z","updated_at":"2021-10-12T15:28:47Z","closed_at":"2021-10-12T15:28:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3065","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3065","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3065.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3065.patch","merged_at":"2021-10-12T15:28:46Z"},"body":"Fix the `datasets-cli` test command after the `prepare_module` change in #2986 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3065\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3065\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3064","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3064\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3064\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3064\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3064","id":1023900075,"node_id":"I_kwDODunzps49B3mr","number":3064,"title":"Make `interleave_datasets` more 
robust","user":{"login":"sbmaruf","id":32699797,"node_id":"MDQ6VXNlcjMyNjk5Nzk3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32699797?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sbmaruf","html_url":"https:\/\/github.com\/sbmaruf","followers_url":"https:\/\/api.github.com\/users\/sbmaruf\/followers","following_url":"https:\/\/api.github.com\/users\/sbmaruf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sbmaruf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sbmaruf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sbmaruf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sbmaruf\/orgs","repos_url":"https:\/\/api.github.com\/users\/sbmaruf\/repos","events_url":"https:\/\/api.github.com\/users\/sbmaruf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sbmaruf\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-12T14:34:53Z","updated_at":"2022-01-26T15:51:47Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nRight now there are few hiccups using `interleave_datasets`. Interleaved dataset iterates until the smallest dataset completes it's iterator. In this way larger datasets may not complete full epoch of iteration. \r\nIt creates new problems in calculation of epoch since there are no way to track which dataset from `interleave_datasets` completes how many epoch.\r\n\r\n**Describe the solution you'd like**\r\nFor `interleave_datasets` module, \r\n- [ ] Add a boolean argument `--stop-iter` in `interleave_datasets` that enables dataset to either iterate infinite amount of time or not. That means it should not return `StopIterator` exception in case `--stop-iter=False`.\r\n- [ ] Internal list variable `iter_cnt` that explains how many times (in steps\/epochs) each dataset iterates at a given point.\r\n- [ ] Add an argument `--max-iter` (list type) that explain maximum amount of time each of the dataset can iterate. After complete `--max-iter` of one dataset, other dataset should continue sampling and when all the dataset finish their respective `--max-iter`, only then return `StopIterator`\r\n\r\nNote: I'm new to `datasets` api. May be these features are already there in the datasets. 
\r\n\r\nSince multitask training is the latest trends, I believe this feature would make the `datasets` api more popular.\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3064\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3064\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3063","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3063\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3063\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3063\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3063","id":1023588297,"node_id":"I_kwDODunzps49ArfJ","number":3063,"title":"Windows CI is unable to test streaming properly because of SSL issues","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":3287858981,"node_id":"MDU6TGFiZWwzMjg3ODU4OTgx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/streaming","name":"streaming","color":"fef2c0","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-12T09:33:40Z","updated_at":"2021-10-19T17:11:52Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In https:\/\/github.com\/huggingface\/datasets\/pull\/3041 the windows tests were skipped because of SSL issues with moon-staging.huggingface.co:443\r\n\r\nThe issue appears only on windows with asyncio. On Linux it works. With requests it works as well. 
And with the production environment huggingface.co it also works.\r\n\r\nto reproduce on windows:\r\n```python\r\nimport fsspec\r\n\r\n# use any URL to a file in a dataset repo\r\nurl = \"https:\/\/moon-staging.huggingface.co\/datasets\/__DUMMY_TRANSFORMERS_USER__\/my-dataset-16242824690709\/resolve\/main\/.gitattributes\"\r\n\r\nfsspec.open(url).open()\r\n```\r\nraises\r\n```python\r\nFileNotFoundError: https:\/\/moon-staging.huggingface.co\/datasets\/__DUMMY_TRANSFORMERS_USER__\/my-dataset-16242824690709\/resolve\/main\/.gitattributes\r\n```\r\nbecause of\r\n```python\r\naiohttp.client_exceptions.ClientConnectorCertificateError: Cannot connect to host moon-staging.huggingface.co:443 ssl:True [SSLCertVerificationError: (1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: certificate has expired (_ssl.c:1131)')]\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3063\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3063\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3062","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3062\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3062\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3062\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3062","id":1023209592,"node_id":"PR_kwDODunzps4tCxfK","number":3062,"title":"Update summary on PyPi beyond NLP","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-11T23:27:46Z","updated_at":"2021-10-13T08:55:54Z","closed_at":"2021-10-13T08:55:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3062","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3062","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3062.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3062.patch","merged_at":"2021-10-13T08:55:53Z"},"body":"More than just NLP 
now","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3062\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3062\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3061","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3061\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3061\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3061\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3061","id":1023103119,"node_id":"I_kwDODunzps48-1CP","number":3061,"title":"Feature request : add leave=True to dataset.map to enable tqdm nested bars (and whilst we're at it couldn't we get a way to access directly tqdm underneath?) ","user":{"login":"BenoitDalFerro","id":69694610,"node_id":"MDQ6VXNlcjY5Njk0NjEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69694610?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BenoitDalFerro","html_url":"https:\/\/github.com\/BenoitDalFerro","followers_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/followers","following_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/orgs","repos_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/repos","events_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-11T20:49:49Z","updated_at":"2021-10-22T09:34:10Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**A clear and concise description of what you want to happen.**\r\n\r\nIt would be so nice to be able to nest HuggingFace `Datasets.map() ` progress bars in the grander scheme of things and whilst we're at it why not other functions.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nBy the way is there not a way to directly interact with underlying tqdm module ? 
**kwargs-ish?\r\n\r\n**Additional context**\r\n\r\nFurthering tqdm integration #2374 and huggingface\/transformers#11797 solutioned by huggingface\/transformers#12226 provided with tqdm description as `desc=`\r\n\r\n@sgugger @bhavitvyamalik","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3061\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3061\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3060","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3060\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3060\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3060\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3060","id":1022936396,"node_id":"I_kwDODunzps48-MVM","number":3060,"title":"load_dataset('openwebtext') yields \"Compressed file ended before the end-of-stream marker was reached\"","user":{"login":"RylanSchaeffer","id":8942987,"node_id":"MDQ6VXNlcjg5NDI5ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8942987?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RylanSchaeffer","html_url":"https:\/\/github.com\/RylanSchaeffer","followers_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/followers","following_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/orgs","repos_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/repos","events_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-10-11T17:05:27Z","updated_at":"2021-10-28T05:52:21Z","closed_at":"2021-10-28T05:52:21Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen I try `load_dataset('openwebtext')`, I receive a \"EOFError: Compressed file ended before the end-of-stream marker was reached\" error.\r\n\r\n## Steps to reproduce the bug\r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('openwebtext')\r\n```\r\n\r\n## Expected results\r\nI expect the `dataset` variable to be properly constructed.\r\n\r\n## Actual results\r\n\r\n```\r\nFile \"\/home\/rschaef\/CoCoSci-Language-Distillation\/distillation_v2\/ratchet_learning\/tasks\/base.py\", line 37, in create_dataset\r\n dataset_str,\r\n File \"\/home\/rschaef\/CoCoSci-Language-Distillation\/cocosci\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 1117, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/rschaef\/CoCoSci-Language-Distillation\/cocosci\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 637, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/rschaef\/CoCoSci-Language-Distillation\/cocosci\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 704, in 
_download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/rschaef\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/openwebtext\/85b3ae7051d2d72e7c5fdf6dfb462603aaa26e9ed506202bf3a24d261c6c40a1\/openwebtext.py\", line 61, in _split_generators\r\n dl_dir = dl_manager.download_and_extract(_URL)\r\n File \"\/home\/rschaef\/CoCoSci-Language-Distillation\/cocosci\/lib\/python3.6\/site-packages\/datasets\/utils\/download_manager.py\", line 284, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/home\/rschaef\/CoCoSci-Language-Distillation\/cocosci\/lib\/python3.6\/site-packages\/datasets\/utils\/download_manager.py\", line 261, in extract\r\n partial(cached_path, download_config=download_config), path_or_paths, num_proc=num_proc, disable_tqdm=False\r\n File \"\/home\/rschaef\/CoCoSci-Language-Distillation\/cocosci\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 197, in map_nested\r\n return function(data_struct)\r\n File \"\/home\/rschaef\/CoCoSci-Language-Distillation\/cocosci\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 316, in cached_path\r\n output_path, force_extract=download_config.force_extract\r\n File \"\/home\/rschaef\/CoCoSci-Language-Distillation\/cocosci\/lib\/python3.6\/site-packages\/datasets\/utils\/extract.py\", line 40, in extract\r\n self.extractor.extract(input_path, output_path, extractor=extractor)\r\n File \"\/home\/rschaef\/CoCoSci-Language-Distillation\/cocosci\/lib\/python3.6\/site-packages\/datasets\/utils\/extract.py\", line 179, in extract\r\n return extractor.extract(input_path, output_path)\r\n File \"\/home\/rschaef\/CoCoSci-Language-Distillation\/cocosci\/lib\/python3.6\/site-packages\/datasets\/utils\/extract.py\", line 53, in extract\r\n tar_file.extractall(output_path)\r\n File \"\/usr\/lib\/python3.6\/tarfile.py\", line 2010, in extractall\r\n numeric_owner=numeric_owner)\r\n File \"\/usr\/lib\/python3.6\/tarfile.py\", line 2052, in extract\r\n numeric_owner=numeric_owner)\r\n File \"\/usr\/lib\/python3.6\/tarfile.py\", line 2122, in _extract_member\r\n self.makefile(tarinfo, targetpath)\r\n File \"\/usr\/lib\/python3.6\/tarfile.py\", line 2171, in makefile\r\n copyfileobj(source, target, tarinfo.size, ReadError, bufsize)\r\n File \"\/usr\/lib\/python3.6\/tarfile.py\", line 249, in copyfileobj\r\n buf = src.read(bufsize)\r\n File \"\/usr\/lib\/python3.6\/lzma.py\", line 200, in read\r\n return self._buffer.read(size)\r\n File \"\/usr\/lib\/python3.6\/_compression.py\", line 68, in readinto\r\n data = self.read(len(byte_view))\r\n File \"\/usr\/lib\/python3.6\/_compression.py\", line 99, in read\r\n raise EOFError(\"Compressed file ended before the \"\r\npython-BaseException\r\nEOFError: Compressed file ended before the end-of-stream marker was reached\r\n```\r\n\r\n\r\n## Environment info\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-4.4.0-173-generic-x86_64-with-Ubuntu-16.04-xenial\r\n- Python version: 3.6.10\r\n- PyArrow version: 5.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3060\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3060\/timeline","performed_via_github_app":null} 
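An `EOFError: Compressed file ended before the end-of-stream marker was reached` raised during extraction, as in the report above, usually points to a download that was cut short and left a truncated archive in the cache. A minimal sketch of the usual first remedy, assuming the installed `datasets` release accepts the string form of `download_mode` (not a confirmed resolution from this thread):

```python
from datasets import load_dataset

# Assumption: the cached archive is truncated; forcing a fresh download discards
# it and re-extracts, which is the usual cure for this end-of-stream EOFError.
dataset = load_dataset("openwebtext", download_mode="force_redownload")
```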
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3059","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3059\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3059\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3059\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3059","id":1022620057,"node_id":"PR_kwDODunzps4tA54w","number":3059,"title":"Fix task reloading from cache","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-11T12:03:04Z","updated_at":"2021-10-11T12:23:39Z","closed_at":"2021-10-11T12:23:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3059","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3059","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3059.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3059.patch","merged_at":"2021-10-11T12:23:38Z"},"body":"When reloading a dataset from the cache when doing `map`, the tasks templates were kept instead of being updated regarding the output of the `map` function. 
This is an issue because we drop the tasks templates that are not compatible anymore after `map`, for example if a column of the template was removed.\r\n\r\nThis PR fixes this and for convenience introduces a decorator `@transmit_tasks` that takes care of doing this verification, similar to the `@transmit_format` decorator.\r\n\r\nThis should fix issue https:\/\/github.com\/huggingface\/datasets\/issues\/3047 cc @sgugger ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3059\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":2,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3059\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3058","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3058\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3058\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3058\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3058","id":1022612664,"node_id":"I_kwDODunzps4889S4","number":3058,"title":"Dataset wikipedia and Bookcorpusopen cannot be fetched from dataloader.","user":{"login":"hobbitlzy","id":35392624,"node_id":"MDQ6VXNlcjM1MzkyNjI0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35392624?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hobbitlzy","html_url":"https:\/\/github.com\/hobbitlzy","followers_url":"https:\/\/api.github.com\/users\/hobbitlzy\/followers","following_url":"https:\/\/api.github.com\/users\/hobbitlzy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hobbitlzy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hobbitlzy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hobbitlzy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hobbitlzy\/orgs","repos_url":"https:\/\/api.github.com\/users\/hobbitlzy\/repos","events_url":"https:\/\/api.github.com\/users\/hobbitlzy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hobbitlzy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-11T11:54:59Z","updated_at":"2022-01-19T14:03:49Z","closed_at":"2022-01-19T14:03:49Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI have used the previous version of `transformers` and `datasets`. The dataset `wikipedia` can be successfully used. Recently, I upgrade them to the newest version and find it raises errors. I also tried other datasets. The `wikitext` works and the `bookcorpusopen` raises the same errors as `wikipedia`.\r\n\r\n## Steps to reproduce the bug\r\nRun the `run_mlm_no_trainer.py` and the given script on this [link](https:\/\/github.com\/huggingface\/transformers\/tree\/master\/examples\/pytorch\/language-modeling). Change the dataset from wikitext to wikipedia or bookcorpusopen. 
BTW, the library transformers is of version 4.11.3.\r\n\r\n## Expected results\r\nThe data batchs are fetched from the data loader and train.\r\n\r\n## Actual results\r\nThe first time to fetch data batch occurs error.\r\n`Traceback (most recent call last):\r\n File \"\/home\/zyli\/anaconda3\/envs\/LatestStacking\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 705, in convert_to_tensors\r\n tensor = as_tensor(value)\r\nValueError: too many dimensions 'str'\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"src\/original_run_mlm_no_trainer.py\", line 528, in \r\n main()\r\n File \"src\/original_run_mlm_no_trainer.py\", line 488, in main\r\n for step, batch in enumerate(train_dataloader):\r\n File \"\/home\/zyli\/anaconda3\/envs\/LatestStacking\/lib\/python3.7\/site-packages\/accelerate\/data_loader.py\", line 303, in __iter__\r\n for batch in super().__iter__():\r\n File \"\/home\/zyli\/anaconda3\/envs\/LatestStacking\/lib\/python3.7\/site-packages\/torch\/utils\/data\/dataloader.py\", line 517, in __next__\r\n data = self._next_data()\r\n File \"\/home\/zyli\/anaconda3\/envs\/LatestStacking\/lib\/python3.7\/site-packages\/torch\/utils\/data\/dataloader.py\", line 557, in _next_data\r\n data = self._dataset_fetcher.fetch(index) # may raise StopIteration\r\n File \"\/home\/zyli\/anaconda3\/envs\/LatestStacking\/lib\/python3.7\/site-packages\/torch\/utils\/data\/_utils\/fetch.py\", line 47, in fetch\r\n return self.collate_fn(data)\r\n File \"\/home\/zyli\/anaconda3\/envs\/LatestStacking\/lib\/python3.7\/site-packages\/transformers\/data\/data_collator.py\", line 41, in __call__\r\n return self.torch_call(features)\r\n File \"\/home\/zyli\/anaconda3\/envs\/LatestStacking\/lib\/python3.7\/site-packages\/transformers\/data\/data_collator.py\", line 671, in torch_call\r\n batch = self.tokenizer.pad(examples, return_tensors=\"pt\", pad_to_multiple_of=self.pad_to_multiple_of)\r\n File \"\/home\/zyli\/anaconda3\/envs\/LatestStacking\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 2774, in pad\r\n return BatchEncoding(batch_outputs, tensor_type=return_tensors)\r\n File \"\/home\/zyli\/anaconda3\/envs\/LatestStacking\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 210, in __init__\r\n self.convert_to_tensors(tensor_type=tensor_type, prepend_batch_axis=prepend_batch_axis)\r\n File \"\/home\/zyli\/anaconda3\/envs\/LatestStacking\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 722, in convert_to_tensors\r\n \"Unable to create tensor, you should probably activate truncation and\/or padding \"\r\nValueError: Unable to create tensor, you should probably activate truncation and\/or padding with 'padding=True' 'truncation=True' to have batched tensors with the same length.\r\n`\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-5.8.0-59-generic-x86_64-with-debian-bullseye-sid\r\n- Python version: 3.7.6\r\n- PyArrow version: 5.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3058\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3058\/timeline","performed_via_github_app":null} 
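The `ValueError: too many dimensions 'str'` raised inside the collator in the report above typically means a raw string column reached the padding step; unlike `wikitext`, both `wikipedia` and `bookcorpusopen` ship an extra string `title` column alongside `text`, so all original columns need to be dropped during tokenization. A minimal sketch of that workaround (not the thread's confirmed resolution; the checkpoint and the 1% split are placeholders):

```python
from datasets import load_dataset
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
raw = load_dataset("wikipedia", "20200501.en", split="train[:1%]")

# Drop every original column, including the string "title" column, so that only
# tokenizer outputs (input_ids, attention_mask, ...) reach the data collator.
tokenized = raw.map(
    lambda batch: tokenizer(batch["text"]),
    batched=True,
    remove_columns=raw.column_names,
)
```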
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3057","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3057\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3057\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3057\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3057","id":1022508315,"node_id":"I_kwDODunzps488j0b","number":3057,"title":"Error in per class precision computation ","user":{"login":"tidhamecha2","id":38906722,"node_id":"MDQ6VXNlcjM4OTA2NzIy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38906722?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tidhamecha2","html_url":"https:\/\/github.com\/tidhamecha2","followers_url":"https:\/\/api.github.com\/users\/tidhamecha2\/followers","following_url":"https:\/\/api.github.com\/users\/tidhamecha2\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tidhamecha2\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tidhamecha2\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tidhamecha2\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tidhamecha2\/orgs","repos_url":"https:\/\/api.github.com\/users\/tidhamecha2\/repos","events_url":"https:\/\/api.github.com\/users\/tidhamecha2\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tidhamecha2\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-11T10:05:19Z","updated_at":"2021-10-11T10:17:44Z","closed_at":"2021-10-11T10:16:16Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen trying to get the per class precision values by providing `average=None`, following error is thrown `ValueError: can only convert an array of size 1 to a Python scalar`\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset, load_metric\r\nprecision_metric = load_metric(\"precision\")\r\npredictions = [0, 2, 1, 0, 0, 1]\r\nreferences = [0, 1, 2, 0, 1, 2]\r\nresults = precision_metric.compute(predictions=predictions, references=references, average=None)\r\n```\r\n\r\n## Expected results\r\n` {'precision': array([0.66666667, 0. , 0. 
])}`\r\nas per https:\/\/github.com\/huggingface\/datasets\/blob\/master\/metrics\/precision\/precision.py\r\n\r\n## Actual results\r\n```\r\n output = self._compute(predictions=predictions, references=references, **kwargs)\r\n File \"~\/.cache\/huggingface\/modules\/datasets_modules\/metrics\/precision\/94709a71c6fe37171ef49d3466fec24dee9a79846c9f176dff66a649e9811690\/precision.py\", line 110, in _compute\r\n sample_weight=sample_weight,\r\nValueError: can only convert an array of size 1 to a Python scalar\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: linux\r\n- Python version: 3.6.9\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3057\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3057\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3056","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3056\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3056\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3056\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3056","id":1022345564,"node_id":"PR_kwDODunzps4tAB9h","number":3056,"title":"Fix meteor metric for version >= 3.6.4","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-11T07:11:44Z","updated_at":"2021-10-11T07:29:20Z","closed_at":"2021-10-11T07:29:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3056","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3056","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3056.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3056.patch","merged_at":"2021-10-11T07:29:19Z"},"body":"After `nltk` update, the meteor metric expects pre-tokenized inputs (breaking change).\r\n\r\nThis PR fixes this issue, while maintaining compatibility with older 
versions.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3056\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3056\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3055","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3055\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3055\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3055\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3055","id":1022319238,"node_id":"I_kwDODunzps4871qG","number":3055,"title":"CI test suite fails after meteor metric update","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-11T06:37:12Z","updated_at":"2021-10-11T07:30:31Z","closed_at":"2021-10-11T07:30:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nCI test suite fails: https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/8110\/workflows\/f059ba43-9154-4632-bebb-82318447ddc9\/jobs\/50010\r\n\r\nStack trace:\r\n```\r\n___________________ LocalMetricTest.test_load_metric_meteor ____________________\r\n[gw1] linux -- Python 3.6.15 \/home\/circleci\/.pyenv\/versions\/3.6.15\/bin\/python3.6\r\n\r\nself = \r\nmetric_name = 'meteor'\r\n\r\n def test_load_metric(self, metric_name):\r\n doctest.ELLIPSIS_MARKER = \"[...]\"\r\n metric_module = importlib.import_module(datasets.load.prepare_module(os.path.join(\"metrics\", metric_name))[0])\r\n metric = datasets.load.import_main_class(metric_module.__name__, dataset=False)\r\n # check parameters\r\n parameters = inspect.signature(metric._compute).parameters\r\n self.assertTrue(\"predictions\" in parameters)\r\n self.assertTrue(\"references\" in parameters)\r\n self.assertTrue(all([p.kind != p.VAR_KEYWORD for p in parameters.values()])) # no **kwargs\r\n # run doctest\r\n with self.patch_intensive_calls(metric_name, metric_module.__name__):\r\n with self.use_local_metrics():\r\n> results = 
doctest.testmod(metric_module, verbose=True, raise_on_error=True)\r\n\r\ntests\/test_metric_common.py:75: \r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \r\n..\/.pyenv\/versions\/3.6.15\/lib\/python3.6\/doctest.py:1951: in testmod\r\n runner.run(test)\r\n..\/.pyenv\/versions\/3.6.15\/lib\/python3.6\/doctest.py:1839: in run\r\n r = DocTestRunner.run(self, test, compileflags, out, False)\r\n..\/.pyenv\/versions\/3.6.15\/lib\/python3.6\/doctest.py:1476: in run\r\n return self.__run(test, compileflags, out)\r\n..\/.pyenv\/versions\/3.6.15\/lib\/python3.6\/doctest.py:1382: in __run\r\n exception)\r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \r\n\r\nself = \r\nout = \r\ntest = \r\nexample = \r\nexc_info = (, TypeError('\"hypothesis\" expects pre-tokenized hypothesis (Iterable[str]): It is a guide to action which ensures that the military always obeys the commands of the party',), )\r\n\r\n def report_unexpected_exception(self, out, test, example, exc_info):\r\n> raise UnexpectedException(test, example, exc_info)\r\nE doctest.UnexpectedException: \r\n\r\n..\/.pyenv\/versions\/3.6.15\/lib\/python3.6\/doctest.py:1845: UnexpectedException\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3055\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3055\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3054","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3054\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3054\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3054\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3054","id":1022108186,"node_id":"PR_kwDODunzps4s_TmE","number":3054,"title":"Update 
Biosses","user":{"login":"bwang482","id":6764450,"node_id":"MDQ6VXNlcjY3NjQ0NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6764450?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bwang482","html_url":"https:\/\/github.com\/bwang482","followers_url":"https:\/\/api.github.com\/users\/bwang482\/followers","following_url":"https:\/\/api.github.com\/users\/bwang482\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bwang482\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bwang482\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bwang482\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bwang482\/orgs","repos_url":"https:\/\/api.github.com\/users\/bwang482\/repos","events_url":"https:\/\/api.github.com\/users\/bwang482\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bwang482\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-10T22:25:12Z","updated_at":"2021-10-13T09:04:27Z","closed_at":"2021-10-13T09:04:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3054","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3054","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3054.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3054.patch","merged_at":"2021-10-13T09:04:27Z"},"body":"Fix variable naming","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3054\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3054\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3053","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3053\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3053\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3053\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3053","id":1022076905,"node_id":"I_kwDODunzps4866fp","number":3053,"title":"load_dataset('the_pile_openwebtext2') produces ArrowInvalid, value too large to fit in C integer 
type","user":{"login":"davidbau","id":3458792,"node_id":"MDQ6VXNlcjM0NTg3OTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3458792?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/davidbau","html_url":"https:\/\/github.com\/davidbau","followers_url":"https:\/\/api.github.com\/users\/davidbau\/followers","following_url":"https:\/\/api.github.com\/users\/davidbau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/davidbau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/davidbau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/davidbau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/davidbau\/orgs","repos_url":"https:\/\/api.github.com\/users\/davidbau\/repos","events_url":"https:\/\/api.github.com\/users\/davidbau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/davidbau\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-10-10T19:55:21Z","updated_at":"2022-01-19T14:04:12Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen loading `the_pile_openwebtext2`, we get the error `pyarrow.lib.ArrowInvalid: Value 2111 too large to fit in C integer type`\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\nds = datasets.load_dataset('the_pile_openwebtext2')\r\n```\r\n\r\n## Expected results\r\nShould download the dataset, convert it to an arrow file, and return a working Dataset object.\r\n\r\n## Actual results\r\nThe download works, but conversion to the arrow file fails as follows:\r\n\r\n```\r\n>>> ds = datasets.load_dataset('the_pile_openwebtext2')\r\nDownloading and preparing dataset openwebtext2\/plain_text (download: 27.33 GiB, generated: 63.86 GiB\r\n, post-processed: Unknown size, total: 91.19 GiB) to \/home\/davidbau\/.cache\/huggingface\/datasets\/open\r\nwebtext2\/plain_text\/1.0.0\/c48ec73ba3483bac673463f48f67e9a4fd8cb49a9d6ec4fb957f0b424b97cf25...\r\nTraceback (most recent call last):\r\n File \"\/home\/davidbau\/.conda\/envs\/tenv\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 1133,\r\n in _prepare_split\r\n writer.write(example, key)\r\n File \"\/home\/davidbau\/.conda\/envs\/tenv\/lib\/python3.9\/site-packages\/datasets\/arrow_writer.py\", line\r\n366, in write\r\n self.write_examples_on_file()\r\n File \"\/home\/davidbau\/.conda\/envs\/tenv\/lib\/python3.9\/site-packages\/datasets\/arrow_writer.py\", line\r\n311, in write_examples_on_file\r\n pa_array = pa.array(typed_sequence)\r\n File \"pyarrow\/array.pxi\", line 222, in pyarrow.lib.array\r\n File \"pyarrow\/array.pxi\", line 110, in pyarrow.lib._handle_arrow_array_protocol\r\n File \"\/home\/davidbau\/.conda\/envs\/tenv\/lib\/python3.9\/site-packages\/datasets\/arrow_writer.py\", line\r\n115, in __arrow_array__\r\n out = pa.array(cast_to_python_objects(self.data, only_1d_for_numpy=True), type=type)\r\n File \"pyarrow\/array.pxi\", line 305, in pyarrow.lib.array\r\n File \"pyarrow\/array.pxi\", line 39, in pyarrow.lib._sequence_to_array\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File 
\"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Value 2111 too large to fit in C integer type\r\n## Environment info\r\n\r\n- `datasets` version:\r\n```\r\n\r\n- Platform: Ubuntu 20.04\r\n- Python version: python 3.9\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3053\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3053\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3052","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3052\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3052\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3052\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3052","id":1021944435,"node_id":"I_kwDODunzps486aJz","number":3052,"title":"load_dataset cannot download the data and hangs on forever if cache dir specified","user":{"login":"BenoitDalFerro","id":69694610,"node_id":"MDQ6VXNlcjY5Njk0NjEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69694610?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BenoitDalFerro","html_url":"https:\/\/github.com\/BenoitDalFerro","followers_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/followers","following_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/orgs","repos_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/repos","events_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BenoitDalFerro\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-10T10:31:36Z","updated_at":"2021-10-11T10:57:09Z","closed_at":"2021-10-11T10:56:36Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAfter updating datasets, a code that ran just fine for ages began to fail. Specifying _datasets.load_dataset_'s _cache_dir_ optional argument on Windows 10 machine results in data download to hang on forever. Same call without cache_dir works just fine. Surprisingly exact same code just runs perfectly fine on Linux docker instance running in cloud.\r\n\r\nUnfortunately I updated Windows also at the same time and I can't remember which version of datasets was running in my conda environment prior to the update otherwise I would have tried both to check this out. 
:(\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\n```\r\ncache_dir = 'c:\/data\/datasets'\r\ndataset = load_dataset('wikipedia', '20200501.en', split='train',cache_dir=cache_dir) \r\n```\r\nNote that exact same code without specifying _cache_dir_ argument works perfectly fine.\r\n```\r\ncache_dir = 'c:\/data\/datasets'\r\ndataset = load_dataset('wikipedia', '20200501.en', split='train') \r\n```\r\n\r\n## Expected results\r\nDownloads the dataset and cache is handled in the _cache_dir_ directory\r\n## Actual results\r\nData download keeps hanging on forever, **NO TRACEBACK**!\r\n\r\n## Environment info\r\n- `datasets` version: 1.12.1\r\n- Platform: Windows-10-10.0.19042-SP0\r\n- Python version: 3.8.11\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3052\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3052\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3051","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3051\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3051\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3051\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3051","id":1021852234,"node_id":"I_kwDODunzps486DpK","number":3051,"title":"Non-Matching Checksum Error with crd3 dataset","user":{"login":"RylanSchaeffer","id":8942987,"node_id":"MDQ6VXNlcjg5NDI5ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8942987?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RylanSchaeffer","html_url":"https:\/\/github.com\/RylanSchaeffer","followers_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/followers","following_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/orgs","repos_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/repos","events_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RylanSchaeffer\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-10T01:32:43Z","updated_at":"2021-10-31T04:32:32Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen I try loading the crd3 dataset (https:\/\/huggingface.co\/datasets\/crd3), an error is thrown.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndataset = load_dataset('crd3', split='train')\r\n```\r\n\r\n## Expected results\r\nI 
expect no error to be thrown.\r\n\r\n\r\n## Actual results\r\nA non-matching checksum error is thrown.\r\n\r\n```\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/github.com\/RevanthRameshkumar\/CRD3\/archive\/master.zip']\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-4.4.0-173-generic-x86_64-with-Ubuntu-16.04-xenial\r\n- Python version: 3.6.10\r\n- PyArrow version: 5.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3051\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3051\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3050","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3050\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3050\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3050\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3050","id":1021772622,"node_id":"PR_kwDODunzps4s-anK","number":3050,"title":"Fix streaming: catch Timeout error","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-10-09T18:19:20Z","updated_at":"2021-10-12T15:28:18Z","closed_at":"2021-10-11T09:35:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3050","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3050","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3050.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3050.patch","merged_at":"2021-10-11T09:35:38Z"},"body":"Catches Timeout error during streaming.\r\n\r\nfix #3049 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3050\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3050\/timeline","performed_via_github_app":null} 
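For context on the shape of the fix in the streaming PR above: the streaming reader already retried reads on `aiohttp` client errors, and the change extends that handling to timeouts. A rough sketch under those assumptions, with placeholder names rather than the library's actual implementation:

```python
import asyncio
import time

from aiohttp import ClientError


def read_with_retries(read, *args, max_retries=3, **kwargs):
    """Retry a streaming read on client errors *and* timeouts (sketch only)."""
    for attempt in range(1, max_retries + 1):
        try:
            return read(*args, **kwargs)
        except (ClientError, asyncio.TimeoutError):
            if attempt == max_retries:
                raise
            time.sleep(2**attempt)  # simple exponential backoff before retrying
```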
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3049","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3049\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3049\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3049\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3049","id":1021770008,"node_id":"I_kwDODunzps485vkY","number":3049,"title":"TimeoutError during streaming","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-09T18:06:51Z","updated_at":"2021-10-11T09:35:38Z","closed_at":"2021-10-11T09:35:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI got a TimeoutError after streaming for about 10h.\r\n\r\n## Steps to reproduce the bug\r\nVery long code but we could do a test of streaming indefinitely data, though error may take a while to appear.\r\n\r\n## Expected results\r\nThis error was not expected in the code which considers only `ClientError` but not `TimeoutError`.\r\nSee [this line](https:\/\/github.com\/huggingface\/datasets\/blob\/2814fbd0e18150be409f10804670e98d9ecb87d4\/src\/datasets\/utils\/streaming_download_manager.py#L129).\r\nBased on the traceback, it looks like the `TimeoutError` was not captured.\r\n\r\n## Actual results\r\n```\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/fsspec\/asyn.py\", line 25, in _runner\r\n result[0] = await coro\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/fsspec\/implementations\/http.py\", line 614, in async_fetch_range\r\n out = await r.read()\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/aiohttp\/client_reqrep.py\", line 1032, in read\r\n self._body = await self.content.read()\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/aiohttp\/streams.py\", line 370, in read\r\n block = await self.readany()\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/aiohttp\/streams.py\", line 392, in readany\r\n await 
self._wait(\"readany\")\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/aiohttp\/streams.py\", line 306, in _wait\r\n await waiter\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/aiohttp\/helpers.py\", line 656, in __exit__\r\n raise asyncio.TimeoutError from None\r\nasyncio.exceptions.TimeoutError\r\nThe above exception was the direct cause of the following exception:\r\nTraceback (most recent call last):\r\n File \"\/home\/koush\/dalle-mini\/dev\/seq2seq\/run_seq2seq_flax.py\", line 1027, in \r\n main()\r\n File \"\/home\/koush\/dalle-mini\/dev\/seq2seq\/run_seq2seq_flax.py\", line 991, in main\r\n for batch in tqdm(\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/tqdm\/std.py\", line 1180, in __iter__\r\n for obj in iterable:\r\n File \"\/home\/koush\/dalle-mini\/dev\/seq2seq\/run_seq2seq_flax.py\", line 376, in data_loader_streaming\r\n for item in dataset:\r\n File \"\/home\/koush\/datasets\/src\/datasets\/iterable_dataset.py\", line 341, in __iter__\r\n for key, example in self._iter():\r\n File \"\/home\/koush\/datasets\/src\/datasets\/iterable_dataset.py\", line 338, in _iter\r\n yield from ex_iterable\r\n File \"\/home\/koush\/datasets\/src\/datasets\/iterable_dataset.py\", line 179, in __iter__\r\n key_examples_list = [(key, example)] + [\r\n File \"\/home\/koush\/datasets\/src\/datasets\/iterable_dataset.py\", line 179, in \r\n key_examples_list = [(key, example)] + [\r\n File \"\/home\/koush\/datasets\/src\/datasets\/iterable_dataset.py\", line 176, in __iter__\r\n for key, example in iterator:\r\n File \"\/home\/koush\/datasets\/src\/datasets\/iterable_dataset.py\", line 225, in __iter__\r\n for x in self.ex_iterable:\r\n File \"\/home\/koush\/datasets\/src\/datasets\/iterable_dataset.py\", line 99, in __iter__\r\n for key, example in self.generate_examples_fn(**kwargs_with_shuffled_shards):\r\n File \"\/home\/koush\/datasets\/src\/datasets\/iterable_dataset.py\", line 287, in wrapper\r\n for key, table in generate_tables_fn(**kwargs):\r\n File \"\/home\/koush\/datasets\/src\/datasets\/packaged_modules\/json\/json.py\", line 107, in _generate_tables\r\n batch = f.read(self.config.chunksize)\r\n File \"\/home\/koush\/datasets\/src\/datasets\/utils\/streaming_download_manager.py\", line 126, in read_with_retries\r\n out = read(*args, **kwargs)\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/fsspec\/implementations\/http.py\", line 572, in read\r\n return super().read(length)\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/fsspec\/spec.py\", line 1533, in read\r\n out = self.cache._fetch(self.loc, self.loc + length)\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/fsspec\/caching.py\", line 390, in _fetch\r\n self.cache = self.fetcher(start, bend)\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/fsspec\/asyn.py\", line 91, in wrapper\r\n return sync(self.loop, func, *args, **kwargs)\r\n File \"\/home\/koush\/.pyenv\/versions\/dev\/lib\/python3.9\/site-packages\/fsspec\/asyn.py\", line 69, in sync\r\n raise FSTimeoutError from return_result\r\nfsspec.exceptions.FSTimeoutError\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.2.dev0\r\n- Platform: Linux-5.4.0-1043-gcp-x86_64-with-glibc2.31\r\n- Python version: 3.9.7\r\n- PyArrow version: 
5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3049\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3049\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3048","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3048\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3048\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3048\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3048","id":1021765661,"node_id":"I_kwDODunzps485ugd","number":3048,"title":"Identify which shard data belongs to","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-09T17:46:35Z","updated_at":"2021-10-09T20:24:17Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\nI'm training on a large dataset made of multiple sub-datasets.\r\nDuring training I can observe some jumps in loss which may correspond to different shards.\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/715491\/136668758-521263aa-a9b2-4ad2-8d22-060b6bf86a1c.png)\r\n\r\nMy suspicion is that either:\r\n* some of the sub-datasets are harder for the model than others\r\n* some of the sub-datasets are not formatted properly\r\n\r\nI'd like to identify which shards correspond to those jumps.\r\n\r\n**Describe the solution you'd like**\r\n\r\nIt would be nice to have a key associated to each data sample or data batch containing details on where the data comes from (shard idx + item idx within the shard).\r\nThis should be supported both in local and streaming mode.\r\n\r\n**Describe alternatives you've considered**\r\nA\u00a0fix would be for me to add myself details (shard id, sample id) as part of each data sample.\r\nThe inconvenient is that it requires users to process\/reupload every dataset when they need this feature.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3048\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3048\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3047","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3047\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3047\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3047\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3047","id":1021360616,"node_id":"I_kwDODunzps484Lno","number":3047,"title":"Loading from cache a dataset for LM built from a text classification dataset sometimes errors","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-10-08T18:23:11Z","updated_at":"2021-11-03T17:13:08Z","closed_at":"2021-11-03T17:13:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nYes, I know, that description sucks. So the problem is arising in the course when we build a masked language modeling dataset using the IMDB dataset. 
To reproduce (or try since it's a bit fickle).\r\n\r\nCreate a dataset for masked language modeling from the IMDB dataset.\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom transformers import AutoTokenizer\r\n\r\ntokenizer = AutoTokenizer.from_pretrained(\"distilbert-base-cased\")\r\nimdb_dataset = load_dataset(\"imdb\", split=\"train\")\r\n\r\ndef tokenize_function(examples):\r\n    return tokenizer(examples[\"text\"])\r\n\r\ntokenized_dataset = imdb_dataset.map(\r\n    tokenize_function, batched=True, remove_columns=[\"text\", \"label\"]\r\n)\r\n\r\nchunk_size = 128\r\n\r\ndef group_texts(examples):\r\n    # Concatenate all texts.\r\n    concatenated_examples = {k: sum(examples[k], []) for k in examples.keys()}\r\n    # Compute length of concatenated texts\r\n    total_length = len(concatenated_examples[list(examples.keys())[0]])\r\n    # We drop the last chunk if it's smaller than chunk_size\r\n    total_length = (total_length \/\/ chunk_size) * chunk_size\r\n    # Split by chunks of max_len.\r\n    result = {\r\n        k: [t[i : i + chunk_size] for i in range(0, total_length, chunk_size)]\r\n        for k, t in concatenated_examples.items()\r\n    }\r\n    # Create a new labels column\r\n    result[\"labels\"] = result[\"input_ids\"].copy()\r\n    return result\r\n\r\nlm_dataset = tokenized_dataset.map(group_texts, batched=True)\r\n```\r\n\r\nUntil now, all is well. The problem comes when you re-execute that code, more specifically:\r\n\r\n```python\r\ntokenized_dataset = imdb_dataset.map(\r\n    tokenize_function, batched=True, remove_columns=[\"text\", \"label\"]\r\n)\r\nlm_dataset = tokenized_dataset.map(group_texts, batched=True)\r\n```\r\n\r\nTry several times if the bug doesn't appear instantly, or run each line one at a time, ideally in a notebook\/Colab, and at some point you should get:\r\n\r\n```python\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n in \r\n----> 1 lm_dataset = tokenized_dataset.map(group_texts, batched=True)\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint, desc)\r\n 1947 new_fingerprint=new_fingerprint,\r\n 1948 disable_tqdm=disable_tqdm,\r\n-> 1949 desc=desc,\r\n 1950 )\r\n 1951 else:\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 424 }\r\n 425 # apply actual function\r\n--> 426 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 427 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 428 # re-apply format to the output\r\n\r\n~\/git\/datasets\/src\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 404 # Call actual function\r\n 405 \r\n--> 406 out = func(self, *args, **kwargs)\r\n 407 \r\n 408 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, disable_tqdm, desc, cache_only)\r\n 2138 if os.path.exists(cache_file_name) and load_from_cache_file:\r\n 2139 logger.warning(\"Loading cached processed dataset at %s\", 
cache_file_name)\r\n-> 2140 info = self.info.copy()\r\n 2141 info.features = features\r\n 2142 return Dataset.from_file(cache_file_name, info=info, split=self.split)\r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in copy(self)\r\n 278 \r\n 279 def copy(self) -> \"DatasetInfo\":\r\n--> 280 return self.__class__(**{k: copy.deepcopy(v) for k, v in self.__dict__.items()})\r\n 281 \r\n 282 \r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in __init__(self, description, citation, homepage, license, features, post_processed, supervised_keys, task_templates, builder_name, config_name, version, splits, download_checksums, download_size, post_processing_size, dataset_size, size_in_bytes)\r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in __post_init__(self)\r\n 177 for idx, template in enumerate(self.task_templates):\r\n 178 if isinstance(template, TextClassification):\r\n--> 179 labels = self.features[template.label_column].names\r\n 180 self.task_templates[idx] = TextClassification(\r\n 181 text_column=template.text_column, label_column=template.label_column, labels=labels\r\n\r\nKeyError: 'label'\r\n```\r\n\r\nIt seems that when loading the cache, the dataset tries to access some kind of text classification template (which I imagine comes from the original dataset) and to look at a key that has since been removed.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3047\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3047\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3046","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3046\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3046\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3046\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3046","id":1021021368,"node_id":"PR_kwDODunzps4s8MjS","number":3046,"title":"Fix MedDialog metadata 
JSON","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-08T12:04:40Z","updated_at":"2021-10-11T07:46:43Z","closed_at":"2021-10-11T07:46:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3046","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3046","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3046.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3046.patch","merged_at":"2021-10-11T07:46:42Z"},"body":"Fix #2969.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3046\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3046\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3045","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3045\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3045\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3045\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3045","id":1020968704,"node_id":"PR_kwDODunzps4s8B2b","number":3045,"title":"Fix inconsistent caching behaviour in Dataset.map() with multiprocessing 
#3044","user":{"login":"vlievin","id":9859840,"node_id":"MDQ6VXNlcjk4NTk4NDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9859840?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vlievin","html_url":"https:\/\/github.com\/vlievin","followers_url":"https:\/\/api.github.com\/users\/vlievin\/followers","following_url":"https:\/\/api.github.com\/users\/vlievin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vlievin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vlievin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vlievin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vlievin\/orgs","repos_url":"https:\/\/api.github.com\/users\/vlievin\/repos","events_url":"https:\/\/api.github.com\/users\/vlievin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vlievin\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-10-08T10:59:21Z","updated_at":"2021-10-21T16:58:32Z","closed_at":"2021-10-21T14:22:44Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3045","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3045","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3045.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3045.patch","merged_at":null},"body":"Fix #3044\r\n\r\n1. A rough unit test that fails without the fix. It probably doesn't comply with your code standards, but that's just to draft the idea.\r\n2. A one-liner fix.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3045\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3045\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3044","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3044\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3044\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3044\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3044","id":1020869778,"node_id":"I_kwDODunzps482TyS","number":3044,"title":"Inconsistent caching behaviour when using `Dataset.map()` with a `new_fingerprint` and 
`num_proc>1`","user":{"login":"vlievin","id":9859840,"node_id":"MDQ6VXNlcjk4NTk4NDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9859840?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vlievin","html_url":"https:\/\/github.com\/vlievin","followers_url":"https:\/\/api.github.com\/users\/vlievin\/followers","following_url":"https:\/\/api.github.com\/users\/vlievin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vlievin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vlievin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vlievin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vlievin\/orgs","repos_url":"https:\/\/api.github.com\/users\/vlievin\/repos","events_url":"https:\/\/api.github.com\/users\/vlievin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vlievin\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-08T09:07:10Z","updated_at":"2021-10-27T08:40:58Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nCaching does not work when using `Dataset.map()` with:\r\n1. a function that cannot be deterministically fingerprinted \r\n2. `num_proc>1`\r\n3. using a custom fingerprint set with the argument `new_fingerprint`. \r\n\r\nThis means that the dataset will be mapped with the function for each and every call, which does not happen if `num_proc==1`. In that case (`num_proc==1`) subsequent calls will load the transformed dataset from the cache, which is the expected behaviour. The example can easily be translated into a unit test.\r\n\r\nI have a fix and will submit a pull request asap. 
\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport hashlib\r\nimport json\r\nimport os\r\nfrom typing import Dict, Any\r\n\r\nimport numpy as np\r\nfrom datasets import load_dataset, Dataset\r\n\r\nBatch = Dict[str, Any]\r\nfilename = 'example.json'\r\n\r\n\r\nclass Transformation():\r\n \"\"\"A transformation with a random state that cannot be fingerprinted\"\"\"\r\n\r\n def __init__(self):\r\n self.state = np.random.random()\r\n\r\n def __call__(self, batch: Batch) -> Batch:\r\n batch['x'] = [np.random.random() for _ in batch['x']]\r\n return batch\r\n\r\n\r\ndef generate_dataset():\r\n \"\"\"generate a simple dataset\"\"\"\r\n rgn = np.random.RandomState(24)\r\n data = {\r\n 'data': [{'x': float(y), 'y': -float(y)} for y in\r\n rgn.random(size=(1000,))]}\r\n if not os.path.exists(filename):\r\n with open(filename, 'w') as f:\r\n f.write(json.dumps(data))\r\n\r\n return filename\r\n\r\n\r\ndef process_dataset_with_cache(num_proc=1, remove_cache=False,\r\n cache_expected_to_exist=False):\r\n\r\n # load the generated dataset\r\n dset: Dataset = next(\r\n iter(load_dataset('json', data_files=filename, field='data').values()))\r\n new_fingerprint = hashlib.md5(\"static-id\".encode(\"utf8\")).hexdigest()\r\n\r\n # get the expected cached path\r\n cache_path = dset._get_cache_file_path(new_fingerprint)\r\n if remove_cache and os.path.exists(cache_path):\r\n os.remove(cache_path)\r\n\r\n # check that the cache exists, and print a statement\r\n # if was actually expected to exist\r\n cache_exist = os.path.exists(cache_path)\r\n print(f\"> cache file exists={cache_exist}\")\r\n if cache_expected_to_exist and not cache_exist:\r\n print(\"=== Cache does not exist! ====\")\r\n\r\n # apply the transformation with the new fingerprint\r\n dset = dset.map(\r\n Transformation(),\r\n batched=True,\r\n num_proc=num_proc,\r\n new_fingerprint=new_fingerprint,\r\n desc=\"mapping dataset with transformation\")\r\n\r\n\r\ngenerate_dataset()\r\n\r\nfor num_proc in [1, 2]:\r\n print(f\"# num_proc={num_proc}, first pass\")\r\n # first pass to generate the cache (always create a new cache here)\r\n process_dataset_with_cache(remove_cache=True,\r\n num_proc=num_proc,\r\n cache_expected_to_exist=False)\r\n print(f\"# num_proc={num_proc}, second pass\")\r\n # second pass, expects the cache to exist\r\n process_dataset_with_cache(remove_cache=False,\r\n num_proc=num_proc,\r\n cache_expected_to_exist=True)\r\n\r\nos.remove(filename)\r\n\r\n```\r\n\r\n## Expected results\r\nIn the above python example, with `num_proc=2`, the **cache file should exist in the second call** of `process_dataset_with_cache` (\"=== Cache does not exist! ====\" should not be printed). \r\nWhen the cache is successfully created, `map()` is called only one time.\r\n\r\n## Actual results\r\nIn the above python example, with `num_proc=2`, the **cache does not exist in the second call** of `process_dataset_with_cache` (this results in printing \"=== Cache does not exist! ====\"). 
\r\nBecause the cache doesn't exist, the `map()` method is executed a second time and the dataset is not loaded from the cache.\r\n\r\n## Environment info\r\n- `datasets` version: 1.12.1\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.8\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3044\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3044\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3043","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3043\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3043\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3043\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3043","id":1020252114,"node_id":"I_kwDODunzps48z8_S","number":3043,"title":"Add PASS dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision 
datasets"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-07T16:43:43Z","updated_at":"2022-01-20T16:50:47Z","closed_at":"2022-01-20T16:50:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** PASS\r\n- **Description:** An ImageNet replacement for self-supervised pretraining without humans\r\n- **Data:** https:\/\/www.robots.ox.ac.uk\/~vgg\/research\/pass\/ https:\/\/github.com\/yukimasano\/PASS\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3043\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3043\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3042","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3042\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3042\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3042\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3042","id":1020047289,"node_id":"PR_kwDODunzps4s5Lxo","number":3042,"title":"Improving elasticsearch integration","user":{"login":"ggdupont","id":5583410,"node_id":"MDQ6VXNlcjU1ODM0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5583410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ggdupont","html_url":"https:\/\/github.com\/ggdupont","followers_url":"https:\/\/api.github.com\/users\/ggdupont\/followers","following_url":"https:\/\/api.github.com\/users\/ggdupont\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ggdupont\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ggdupont\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ggdupont\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ggdupont\/orgs","repos_url":"https:\/\/api.github.com\/users\/ggdupont\/repos","events_url":"https:\/\/api.github.com\/users\/ggdupont\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ggdupont\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-07T13:28:35Z","updated_at":"2021-10-19T16:17:39Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3042","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3042","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3042.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3042.patch","merged_at":null},"body":"- adding murmurhash signature to sample in index\r\n- adding optional credentials for remote elasticsearch server\r\n- enabling sample update in index\r\n- upgrade the elasticsearch 7.10.1 python client\r\n- adding ElasticsearchBulider to instantiate a dataset from an index and a filtering query","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3042\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3042\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3041","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3041\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3041\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3041\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3041","id":1018911385,"node_id":"PR_kwDODunzps4s1ZAc","number":3041,"title":"Load private data files + use glob on ZIP archives for json\/csv\/etc. 
module inference","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-06T18:16:36Z","updated_at":"2021-10-12T15:25:48Z","closed_at":"2021-10-12T15:25:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3041","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3041","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3041.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3041.patch","merged_at":"2021-10-12T15:25:46Z"},"body":"As mentioned in https:\/\/github.com\/huggingface\/datasets\/issues\/3032 loading data files from a private repository isn't working correctly because of the data files resolver.\r\n\r\n#2986 did a refactor of the data files resolver. I added authentication to it.\r\n\r\nI also improved it to glob inside ZIP archives to look for json\/csv\/etc. files and infer which dataset builder (json\/csv\/etc.) 
to use.\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/3032\r\n\r\nNote that #2986 needs to get merged first","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3041\/reactions","total_count":2,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3041\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3040","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3040\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3040\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3040\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3040","id":1018782475,"node_id":"I_kwDODunzps48uWML","number":3040,"title":"[save_to_disk] Using `select()` followed by `save_to_disk` saves complete dataset making it hard to create dummy dataset","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-10-06T17:08:47Z","updated_at":"2021-11-02T15:41:08Z","closed_at":"2021-11-02T15:41:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nWhen only keeping a dummy size of a dataset (say the first 100 samples), and then saving it to disk to upload it in the following to the hub for easy demo\/use - not 
just the small dataset is saved but the whole dataset with an indices file. The problem with this is that the dataset is still very big.\r\n\r\n## Steps to reproduce the bug\r\n\r\nE.g. run the following:\r\n\r\n```python\r\nfrom datasets import load_dataset, save_to_disk\r\n\r\nnlp = load_dataset(\"glue\", \"mnli\", split=\"train\")\r\nnlp.save_to_disk(\"full\")\r\n\r\nnlp = nlp.select(range(100))\r\nnlp.save_to_disk(\"dummy\")\r\n```\r\n\r\nNow one can see that both `\"dummy\"` and `\"full\"` have the same size. This shouldn't be the case IMO.\r\n\r\n## Expected results\r\n\r\nIMO `\"dummy\"` should be much smaller so that one can easily play around with the dataset on the hub.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n\r\n- `datasets` version: 1.12.2.dev0\r\n- Platform: Linux-5.11.0-34-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.5\r\n- PyArrow version: 5.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3040\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3040\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3039","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3039\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3039\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3039\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3039","id":1018219800,"node_id":"PR_kwDODunzps4sy_J-","number":3039,"title":"Add sberquad 
dataset","user":{"login":"Alenush","id":13781234,"node_id":"MDQ6VXNlcjEzNzgxMjM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13781234?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Alenush","html_url":"https:\/\/github.com\/Alenush","followers_url":"https:\/\/api.github.com\/users\/Alenush\/followers","following_url":"https:\/\/api.github.com\/users\/Alenush\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Alenush\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Alenush\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Alenush\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Alenush\/orgs","repos_url":"https:\/\/api.github.com\/users\/Alenush\/repos","events_url":"https:\/\/api.github.com\/users\/Alenush\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Alenush\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-06T12:32:02Z","updated_at":"2021-10-13T10:19:11Z","closed_at":"2021-10-13T10:16:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3039","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3039","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3039.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3039.patch","merged_at":"2021-10-13T10:16:04Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3039\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3039\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3038","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3038\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3038\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3038\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3038","id":1018113499,"node_id":"PR_kwDODunzps4syno_","number":3038,"title":"add sberquad 
dataset","user":{"login":"Alenush","id":13781234,"node_id":"MDQ6VXNlcjEzNzgxMjM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13781234?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Alenush","html_url":"https:\/\/github.com\/Alenush","followers_url":"https:\/\/api.github.com\/users\/Alenush\/followers","following_url":"https:\/\/api.github.com\/users\/Alenush\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Alenush\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Alenush\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Alenush\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Alenush\/orgs","repos_url":"https:\/\/api.github.com\/users\/Alenush\/repos","events_url":"https:\/\/api.github.com\/users\/Alenush\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Alenush\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-06T11:33:39Z","updated_at":"2021-10-06T11:58:01Z","closed_at":"2021-10-06T11:58:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3038","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3038","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3038.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3038.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3038\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3038\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3037","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3037\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3037\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3037\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3037","id":1018091919,"node_id":"PR_kwDODunzps4syi15","number":3037,"title":"SberQuad","user":{"login":"Alenush","id":13781234,"node_id":"MDQ6VXNlcjEzNzgxMjM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13781234?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Alenush","html_url":"https:\/\/github.com\/Alenush","followers_url":"https:\/\/api.github.com\/users\/Alenush\/followers","following_url":"https:\/\/api.github.com\/users\/Alenush\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Alenush\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Alenush\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Alenush\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Alenush\/orgs","repos_url":"https:\/\/api.github.com\/users\/Alenush\/repos","events_url":"https:\/\/api.github.com\/users\/Alenush\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Alenush\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-06T11:21:08Z","updated_at":"2021-10-06T11:33:08Z","closed_at":"2021-10-06T11:33:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3037","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3037","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3037.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3037.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3037\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3037\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3036","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3036\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3036\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3036\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3036","id":1017687944,"node_id":"I_kwDODunzps48qK-I","number":3036,"title":"Protect master branch to force contributions via Pull 
Requests","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-10-06T07:34:17Z","updated_at":"2021-10-07T06:51:47Z","closed_at":"2021-10-07T06:49:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In order to have a clearer Git history in the master branch, I propose to protect it so that all contributions must be done through a Pull Request and no direct commits to master are allowed.\r\n- The Pull Request allows to give context, discuss any potential issues and improve the quality of the contribution\r\n- The Pull Request will eventually be squashed and merged into master with a single commit that links to the Pull Request page (with all the context\/discussions)\r\n\r\nNote that we already implemented a protection in the master branch to avoid *merge* commits and ensure a linear history. This proposal goes one step further by avoiding all kind of direct commits and forcing contributions **only** through Pull Requests.\r\n\r\nPlease note that we can temporarily deactivate this protection if we need to make a direct commit, e.g. at each new version release.\r\n\r\nThe only way GitHub allows this kind or protection is by requiring a minimal number (at least one) of approvals of the Pull Request. The inconvenient is that the PR creator cannot approve their own PR: another person must approve it before it can be merged into master. To circumvent this, we could eventually disable this protection in the master branch when an urgent commit is needed (e.g. 
for a hotfix) and there is no other person available at that time to approve the PR.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3036\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3036\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3035","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3035\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3035\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3035\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3035","id":1016770071,"node_id":"I_kwDODunzps48mq4X","number":3035,"title":"`load_dataset` does not work with uploaded arrow file","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-10-05T20:15:10Z","updated_at":"2021-10-06T17:01:37Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nI've preprocessed and uploaded a dataset here: https:\/\/huggingface.co\/datasets\/ami-wav2vec2\/ami_headset_single_preprocessed . 
The dataset is in `.arrow` format.\r\n\r\nThe dataset can correctly be loaded when doing:\r\n\r\n```bash\r\ngit lfs install\r\ngit clone https:\/\/huggingface.co\/datasets\/ami-wav2vec2\/ami_headset_single_preprocessed\r\n```\r\n\r\nfollowed by \r\n\r\n```python\r\nfrom datasets import load_from_disk\r\n\r\nds = load_from_disk(\".\/ami_headset_single_preprocessed\")\r\n```\r\n\r\nHowever when I try to directly download the dataset as follows:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\"ami-wav2vec2\/ami_headset_single_preprocessed\")\r\n```\r\n\r\nthe following error occurs:\r\n\r\n```bash\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, task, streaming, **config_kwargs)\r\n 1115 ignore_verifications=ignore_verifications,\r\n 1116 try_from_hf_gcs=try_from_hf_gcs,\r\n-> 1117 use_auth_token=use_auth_token,\r\n 1118 )\r\n 1119 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n 635 if not downloaded_from_gcs:\r\n 636 self._download_and_prepare(\r\n--> 637 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 638 )\r\n 639 # Sync info\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 724 try:\r\n 725 # Prepare split will record examples associated to the split\r\n--> 726 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n 727 except OSError as e:\r\n 728 raise OSError(\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in _prepare_split(self, split_generator)\r\n 1186 generator, unit=\" tables\", leave=False, disable=bool(logging.get_verbosity() == logging.NOTSET)\r\n 1187 ):\r\n-> 1188 writer.write_table(table)\r\n 1189 num_examples, num_bytes = writer.finalize()\r\n 1190 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_writer.py in write_table(self, pa_table, writer_batch_size)\r\n 424 # reorder the arrays if necessary + cast to self._schema\r\n 425 # we can't simply use .cast here because we may need to change the order of the columns\r\n--> 426 pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n 427 batches: List[pa.RecordBatch] = pa_table.to_batches(max_chunksize=writer_batch_size)\r\n 428 self._num_bytes += sum(batch.nbytes for batch in batches)\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.from_arrays()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/table.pxi in pyarrow.lib._sanitize_arrays()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/array.pxi in pyarrow.lib.asarray()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/table.pxi in pyarrow.lib.ChunkedArray.cast()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/compute.py in cast(arr, target_type, safe)\r\n 279 else:\r\n 280 options = CastOptions.unsafe(target_type)\r\n--> 281 return call_function(\"cast\", [arr], options)\r\n 282 \r\n 283 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/_compute.pyx in 
pyarrow._compute.call_function()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/_compute.pyx in pyarrow._compute.Function.call()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowNotImplementedError: Unsupported cast from struct, validation: struct, test: struct> to list using function cast_list\r\n```\r\n\r\n## Expected results\r\n\r\nThe dataset should be correctly loaded with `load_dataset` IMO.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.2.dev0\r\n- Platform: Linux-5.11.0-34-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.5\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3035\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3035\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3034","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3034\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3034\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3034\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3034","id":1016759202,"node_id":"I_kwDODunzps48moOi","number":3034,"title":"Errors loading dataset using fs = a gcsfs.GCSFileSystem","user":{"login":"dconatha","id":74556552,"node_id":"MDQ6VXNlcjc0NTU2NTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/74556552?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dconatha","html_url":"https:\/\/github.com\/dconatha","followers_url":"https:\/\/api.github.com\/users\/dconatha\/followers","following_url":"https:\/\/api.github.com\/users\/dconatha\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dconatha\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dconatha\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dconatha\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dconatha\/orgs","repos_url":"https:\/\/api.github.com\/users\/dconatha\/repos","events_url":"https:\/\/api.github.com\/users\/dconatha\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dconatha\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-05T20:07:08Z","updated_at":"2021-10-05T20:26:39Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nCannot load dataset using a `gcsfs.GCSFileSystem`. I'm not sure if this should be a bug in `gcsfs` or here...\r\n\r\nBasically what seems to be happening is that since datasets saves datasets as folders and folders aren't \"real objects\" in gcs, gcsfs raises a 404 error. 
There are workarounds if you use gcsfs directly to download the file, but as is I can't get `load_from_disk` to work.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n# load some dataset\r\ndataset = load_dataset(\"squad\", split=\"train\")\r\n\r\n# save it to gcs\r\nimport gcsfs\r\nfs = gcsfs.GCSFileSystem(project=\"my-gs-project\")\r\ndataset.save_to_disk(\"gs:\/\/my-bucket\/squad\", fs=fs)\r\n\r\n# try to load it from gcs\r\nfrom datasets import load_from_disk\r\ndataset2 = load_from_disk(\"my-bucket\/squad\", fs=fs)\r\n```\r\n\r\n\r\n## Expected results\r\n`dataset2` would be a copy of `dataset` but loaded from my bucket.\r\n\r\n## Actual results\r\nLong traceback but essentially it's a 404 error from gcsfs saying the object `my-bucket\/squad` doesn't exist when this is called:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/9c81b7d2e6d9feae69a084a3abda265a4ca07fb5\/src\/datasets\/arrow_dataset.py#L977\r\n\r\nThis is because there is no actual object called `my-bucket\/squad`, there are objects called `my-bucket\/squad\/dataset.arrow`, etc.\r\n\r\nNote that *this* works fine, since it's explicitly saying \"download all the objects with this prefix\":\r\n```python\r\n fs.download(src_dataset_path + \"\/*\", dataset_path.as_posix(), recursive=True)\r\n```\r\nFor example, I can do a workaround this way:\r\n```python\r\nimport tempfile\r\n\r\nwith tempfile.TemporaryDirectory() as temppath:\r\n fs.download(\"gs:\/\/my-bucket\/squad\/*\", temppath)\r\n dataset2 = load_from_disk(temppath)\r\n```\r\n\r\nIt's unclear to me if it's `gcsfs`'s responsibility to say \"hey that's folder not a file, I should try to get objects inside of it not the object itself\", or if that's `datasets`'s responsibility... I'm leaning towards the latter since you're never loading a dataset from one file using this function\/method, only a dataset folder?\r\n\r\nAnother minor thing that should maybe should be rolled into this bug...\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/9c81b7d2e6d9feae69a084a3abda265a4ca07fb5\/src\/datasets\/arrow_dataset.py#L968\r\n\r\nThese fail if you pass in a `gs:\/\/` path, e.g.\r\n\r\n```python\r\ndataset2 = load_from_disk(\"gs:\/\/my-bucket\/squad\", fs=fs)\r\n```\r\nBecause at this point, `dataset_info_path` is `gs:\/my-bucket\/squad\/dataset_info.json`, gcsfs throws a:\r\n```\r\nInvalid bucket name: 'gs:'\r\n```\r\nerror\r\n\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: macOS Big Sur 11.6\r\n- Python version: 3.7.12\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3034\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3034\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3033","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3033\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3033\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3033\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3033","id":1016619572,"node_id":"PR_kwDODunzps4std7u","number":3033,"title":"Actual \"proper\" install of ruamel.yaml in the 
windows CI","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-05T17:52:07Z","updated_at":"2021-10-05T17:54:57Z","closed_at":"2021-10-05T17:54:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3033","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3033","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3033.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3033.patch","merged_at":"2021-10-05T17:54:56Z"},"body":"It was impossible to update the package directly with `pip`. Indeed it was installed with `distutils` which prevents `pip` or `conda` to uninstall it.\r\n\r\nI had to `rm` a directory from the `site-packages` python directory, and then do `pip install ruamel.yaml`\r\n\r\nIt's not that \"proper\" but I couldn't find better solutions","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3033\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3033\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3032","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3032\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3032\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3032\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3032","id":1016488475,"node_id":"I_kwDODunzps48lmIb","number":3032,"title":"Error when loading private dataset with \"data_files\" 
arg","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-05T15:46:27Z","updated_at":"2021-10-12T15:26:22Z","closed_at":"2021-10-12T15:25:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\nPrivate datasets with no loading script can't be loaded using `data_files` parameter.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndata_files = {\"train\": \"**\/train\/*\/*.jsonl\", \"valid\": \"**\/valid\/*\/*.jsonl\"}\r\ndataset = load_dataset('dalle-mini\/encoded', data_files=data_files, use_auth_token=True, streaming=True)\r\n```\r\n\r\nSame error happens in non-streaming mode.\r\n\r\n## Expected results\r\nFiles should be loaded (whether in streaming or not).\r\n\r\n## Actual results\r\nError:\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nFileNotFoundError Traceback (most recent call last)\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, dynamic_modules_path, return_resolved_file_path, return_associated_base_path, data_files, **download_kwargs)\r\n 539 try:\r\n--> 540 local_path = cached_path(file_path, download_config=download_config)\r\n 541 except FileNotFoundError:\r\n\r\n8 frames\r\nFileNotFoundError: Couldn't find file at https:\/\/huggingface.co\/datasets\/dalle-mini\/encoded\/resolve\/main\/encoded.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nHTTPError Traceback (most recent call last)\r\nHTTPError: 404 Client Error: Not Found for url: https:\/\/huggingface.co\/api\/datasets\/dalle-mini\/encoded?full=true\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nFileNotFoundError Traceback (most recent call last)\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, dynamic_modules_path, return_resolved_file_path, return_associated_base_path, data_files, **download_kwargs)\r\n 547 except Exception:\r\n 548 raise 
FileNotFoundError(\r\n--> 549 f\"Couldn't find a directory or a {resource_type} named '{path}'. \"\r\n 550 f\"It doesn't exist locally at {expected_dir_for_combined_path_abs} or remotely on {hf_api.endpoint}\/datasets\"\r\n 551 )\r\n\r\nFileNotFoundError: Couldn't find a directory or a dataset named 'dalle-mini\/encoded'. It doesn't exist locally at \/content\/dalle-mini\/encoded or remotely on https:\/\/huggingface.co\/datasets\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-5.4.104+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 3.0.0\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3032\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3032\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3031","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3031\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3031\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3031\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3031","id":1016458496,"node_id":"PR_kwDODunzps4ss9jn","number":3031,"title":"Align tqdm control with cache control","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-05T15:18:49Z","updated_at":"2021-10-18T15:00:21Z","closed_at":"2021-10-18T14:59:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3031","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3031","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3031.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3031.patch","merged_at":"2021-10-18T14:59:30Z"},"body":"Currently, once disabled with `disable_progress_bar`, progress bars cannot be re-enabled again. To overcome this limitation, this PR introduces the `set_progress_bar_enabled` function that accepts a boolean indicating whether to display progress bars. The goal is to provide a similar API to the existing cache control API. 
Following the Zen of Python (\ud83d\ude04), there should be one and preferably only one obvious way to do it, so I'm also deprecating the aforementioned `disable_progress_bar` function. Additionally, I justify the deprecation with the fact that this function has never been in the docs.\r\n\r\nMoreover, similar API changes have recently been introduced to [`tfds`](https:\/\/github.com\/tensorflow\/datasets\/blob\/a1e8b98f45b0214082b546cc967c67c43fffda55\/tensorflow_datasets\/core\/utils\/tqdm_utils.py#L98-L112).\r\n\r\nConsidering the popularity of the [comment](https:\/\/github.com\/huggingface\/datasets\/issues\/1627#issuecomment-751383559) I made a while ago, this API (`set_progress_bar_enabled` and `is_progress_bar_enabled`) should be mentioned in the docs, but I'm not sure where to put it exactly. Maybe we can replace the `logging_methods` page under `package_reference` with `utility_methods` and then introduce two subsections on that page: `Logging methods` and `tqdm control`.\r\n\r\nAdditionally, this PR:\r\n* adds the `disable_tqdm` keyword arg of `Dataset._map_single` to the `ignore_kwargs` list to ignore it when computing the fingerprint (forgot to add it in #2696)\r\n* deletes the unused components in `tqdm_utils.py`, which seem to be inherited from `tfds`\r\n* disables the tqdm output in the test suite. As I see it, this output doesn't seem informative, but let me know if this is not a good idea","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3031\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3031\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3030","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3030\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3030\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3030\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3030","id":1016435324,"node_id":"PR_kwDODunzps4ss41W","number":3030,"title":"Add `remove_columns` to 
`IterableDataset`","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-05T14:58:33Z","updated_at":"2021-10-08T15:33:15Z","closed_at":"2021-10-08T15:31:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3030","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3030","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3030.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3030.patch","merged_at":"2021-10-08T15:31:53Z"},"body":"Fixes #2944\r\n\r\nWIP\r\n* Not tested yet.\r\n* We might want to allow batched remove for efficiency.\r\n\r\n@lhoestq Do you think it should have `batched=` and `batch_size=`?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3030\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3030\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3029","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3029\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3029\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3029\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3029","id":1016389901,"node_id":"PR_kwDODunzps4ssvkr","number":3029,"title":"Use standard open-domain validation split in 
nq_open","user":{"login":"craffel","id":417568,"node_id":"MDQ6VXNlcjQxNzU2OA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/417568?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/craffel","html_url":"https:\/\/github.com\/craffel","followers_url":"https:\/\/api.github.com\/users\/craffel\/followers","following_url":"https:\/\/api.github.com\/users\/craffel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/craffel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/craffel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/craffel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/craffel\/orgs","repos_url":"https:\/\/api.github.com\/users\/craffel\/repos","events_url":"https:\/\/api.github.com\/users\/craffel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/craffel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-05T14:19:27Z","updated_at":"2021-10-05T14:56:46Z","closed_at":"2021-10-05T14:56:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3029","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3029","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3029.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3029.patch","merged_at":"2021-10-05T14:56:45Z"},"body":"The nq_open dataset originally drew the validation set from this file:\r\nhttps:\/\/github.com\/google-research-datasets\/natural-questions\/blob\/master\/nq_open\/NQ-open.efficientqa.dev.1.1.sample.jsonl\r\nHowever, that's the dev set used specifically and only for the efficientqa competition, and it's not the same dev set as is used in every open-domain question answering paper (including the Lee et al paper that introduced the open-domain variant of NQ, cited at the top of the dataset file). 
This PR changes nq_open to use the standard validation split and bumps the version to 2.0.0 since this is a breaking change.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3029\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3029\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3028","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3028\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3028\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3028\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3028","id":1016230272,"node_id":"PR_kwDODunzps4ssO4s","number":3028,"title":"Properly install ruamel-yaml for windows CI","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-10-05T11:51:15Z","updated_at":"2021-10-05T14:02:12Z","closed_at":"2021-10-05T11:51:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3028","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3028","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3028.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3028.patch","merged_at":"2021-10-05T11:51:22Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3028\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3028\/timeline","performed_via_github_app":null} 
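A minimal sketch (not part of PR #3029 itself) of how a user might check the nq_open split change described above after upgrading: it only uses the standard `load_dataset` API, and the printed split sizes depend on which dataset version is resolved.

```python
from datasets import load_dataset

# Sketch: after the change in #3029, nq_open should expose the standard
# open-domain validation split rather than the efficientqa dev sample.
# Split sizes depend on the installed dataset version (2.0.0 onward).
ds = load_dataset("nq_open")
print(ds)                    # shows the available splits and their sizes
print(ds["validation"][0])   # one open-domain QA example
```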
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3027","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3027\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3027\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3027\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3027","id":1016150117,"node_id":"I_kwDODunzps48kThl","number":3027,"title":"Resolve data_files by split name","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":f
alse}],"milestone":null,"comments":3,"created_at":"2021-10-05T10:24:36Z","updated_at":"2021-11-05T17:49:58Z","closed_at":"2021-11-05T17:49:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"This issue is about discussing the default behavior when someone loads a dataset that consists in data files. For example:\r\n```python\r\nload_dataset(\"lhoestq\/demo1\")\r\n```\r\nshould return two splits \"train\" and \"test\" since the dataset repostiory is like\r\n```\r\ndata\/\r\n\u251c\u2500\u2500 train.csv\r\n\u2514\u2500\u2500 test.csv\r\n```\r\nCurrently it returns only one split \"train\" which contains the data of both files\r\n\r\n\r\nI started playing with this idea on this branch btw: `resolve-data_files-by-split-name`\r\nBasically the idea is that if you named you data files after split names then the default pattern is\r\n```python\r\n{\r\n \"train\": [\"*train*\"],\r\n \"test\": [\"*test*\"],\r\n \"validation\": [\"*dev*\", \"valid\"],\r\n}\r\n```\r\notherwise it's\r\n```python\r\n{\r\n \"train\": [\"*\"]\r\n}\r\n```\r\n\r\nLet me know what you think !\r\ncc @albertvillanova @LysandreJik @vblagoje ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3027\/reactions","total_count":3,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3027\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3026","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3026\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3026\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3026\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3026","id":1016067794,"node_id":"PR_kwDODunzps4srtyc","number":3026,"title":"added arxiv paper inswiss_judgment_prediction dataset 
card","user":{"login":"JoelNiklaus","id":3775944,"node_id":"MDQ6VXNlcjM3NzU5NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3775944?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JoelNiklaus","html_url":"https:\/\/github.com\/JoelNiklaus","followers_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/followers","following_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/orgs","repos_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/repos","events_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-05T09:02:01Z","updated_at":"2021-10-08T16:01:44Z","closed_at":"2021-10-08T16:01:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3026","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3026","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3026.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3026.patch","merged_at":"2021-10-08T16:01:24Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3026\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3026\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3025","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3025\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3025\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3025\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3025","id":1016061222,"node_id":"PR_kwDODunzps4srsgG","number":3025,"title":"Fix Windows test 
suite","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-05T08:55:22Z","updated_at":"2021-10-05T09:58:28Z","closed_at":"2021-10-05T09:58:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3025","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3025","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3025.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3025.patch","merged_at":"2021-10-05T09:58:27Z"},"body":"Try a hotfix to restore Windows test suite.\r\n\r\nFix #3024.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3025\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3025\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3024","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3024\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3024\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3024\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3024","id":1016052911,"node_id":"I_kwDODunzps48j7yv","number":3024,"title":"Windows test suite 
fails","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-10-05T08:46:46Z","updated_at":"2021-10-05T09:58:27Z","c
losed_at":"2021-10-05T09:58:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nThere is an error during installation of tests dependencies for Windows: https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/7981\/workflows\/9b6a0114-2b8e-4069-94e5-e844dbbdba4e\/jobs\/49206\r\n\r\n```\r\nERROR: Cannot uninstall 'ruamel-yaml'. It is a distutils installed project and thus we cannot accurately determine which files belong to it which would lead to only a partial uninstall.\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3024\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3024\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3023","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3023\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3023\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3023\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3023","id":1015923031,"node_id":"PR_kwDODunzps4srQ4i","number":3023,"title":"Fix typo","user":{"login":"qqaatw","id":24835382,"node_id":"MDQ6VXNlcjI0ODM1Mzgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24835382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/qqaatw","html_url":"https:\/\/github.com\/qqaatw","followers_url":"https:\/\/api.github.com\/users\/qqaatw\/followers","following_url":"https:\/\/api.github.com\/users\/qqaatw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/qqaatw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/qqaatw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/qqaatw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/qqaatw\/orgs","repos_url":"https:\/\/api.github.com\/users\/qqaatw\/repos","events_url":"https:\/\/api.github.com\/users\/qqaatw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/qqaatw\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-05T06:06:11Z","updated_at":"2021-10-05T11:56:55Z","closed_at":"2021-10-05T11:56:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3023","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3023","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3023.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3023.patch","merged_at":"2021-10-05T11:56:55Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3023\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3023\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3022","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3022\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3022\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3022\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3022","id":1015750221,"node_id":"PR_kwDODunzps4sqve6","number":3022,"title":"MeDAL dataset: Add further description and update download URL","user":{"login":"xhlulu","id":21180505,"node_id":"MDQ6VXNlcjIxMTgwNTA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21180505?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/xhlulu","html_url":"https:\/\/github.com\/xhlulu","followers_url":"https:\/\/api.github.com\/users\/xhlulu\/followers","following_url":"https:\/\/api.github.com\/users\/xhlulu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/xhlulu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/xhlulu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/xhlulu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/xhlulu\/orgs","repos_url":"https:\/\/api.github.com\/users\/xhlulu\/repos","events_url":"https:\/\/api.github.com\/users\/xhlulu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/xhlulu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-05T00:13:28Z","updated_at":"2021-10-13T09:03:09Z","closed_at":"2021-10-13T09:03:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3022","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3022","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3022.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3022.patch","merged_at":"2021-10-13T09:03:09Z"},"body":"Added more details in the following sections:\r\n* Dataset Structure\r\n* Data Instances\r\n* Data Splits\r\n* Source Data\r\n* Annotations\r\n* Discussions of Biases\r\n* LIcensing Information","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3022\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3022\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3021","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3021\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3021\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3021\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3021","id":1015444094,"node_id":"PR_kwDODunzps4spzJU","number":3021,"title":"Support loading dataset from multiple zipped CSV data 
files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-04T17:33:57Z","updated_at":"2021-10-06T08:36:46Z","closed_at":"2021-10-06T08:36:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3021","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3021","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3021.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3021.patch","merged_at":"2021-10-06T08:36:45Z"},"body":"Fix partially #3018.\r\n\r\nCC: @lewtun ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3021\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3021\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3020","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3020\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3020\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3020\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3020","id":1015406105,"node_id":"PR_kwDODunzps4sprfa","number":3020,"title":"Add a metric for the MATH dataset 
(competition_math).","user":{"login":"hacobe","id":91226467,"node_id":"MDQ6VXNlcjkxMjI2NDY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/91226467?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hacobe","html_url":"https:\/\/github.com\/hacobe","followers_url":"https:\/\/api.github.com\/users\/hacobe\/followers","following_url":"https:\/\/api.github.com\/users\/hacobe\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hacobe\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hacobe\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hacobe\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hacobe\/orgs","repos_url":"https:\/\/api.github.com\/users\/hacobe\/repos","events_url":"https:\/\/api.github.com\/users\/hacobe\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hacobe\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-04T16:52:16Z","updated_at":"2021-10-22T10:29:31Z","closed_at":"2021-10-22T10:29:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3020","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3020","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3020.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3020.patch","merged_at":"2021-10-22T10:29:31Z"},"body":"This metric computes accuracy for the MATH dataset (https:\/\/arxiv.org\/abs\/2103.03874) after canonicalizing the prediction and the reference (e.g., converting \"1\/2\" to \"\\\\\\\\frac{1}{2}\").","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3020\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3020\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3019","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3019\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3019\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3019\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3019","id":1015339983,"node_id":"PR_kwDODunzps4speOB","number":3019,"title":"Fix filter 
leaking","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-04T15:42:58Z","updated_at":"2021-11-02T09:58:18Z","closed_at":"2021-10-05T08:33:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3019","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3019","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3019.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3019.patch","merged_at":"2021-10-05T08:33:07Z"},"body":"If filter is called after using a first transform `shuffle`, `select`, `shard`, `train_test_split`, or `filter`, then it could not work as expected and return examples from before the first transform. 
This is because the indices mapping was not taken into account when saving the indices to keep when doing the filtering\r\n\r\nAffected versions: 1.12.0 and 1.12.1\r\n\r\nThis should fix issue https:\/\/github.com\/huggingface\/datasets\/issues\/3010","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3019\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3019\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3018","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3018\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3018\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3018\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3018","id":1015311877,"node_id":"I_kwDODunzps48hG4F","number":3018,"title":"Support multiple zipped CSV data files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-10-04T15:16:59Z","updated_at":"2021-10-05T14:32:57Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As requested by @lewtun, support loading multiple zipped CSV data files.\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nurl = \"https:\/\/domain.org\/filename.zip\"\r\ndata_files = {\"train\": \"train_filename.csv\", \"test\": \"test_filename.csv\"}\r\ndataset = load_dataset(\"csv\", data_dir=url, data_files=data_files)\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3018\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3018\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3017","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3017\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3017\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3017\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3017","id":1015215528,"node_id":"PR_kwDODunzps4spE9m","number":3017,"title":"Remove unused parameter in xdirname","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-04T13:55:53Z","updated_at":"2021-10-05T11:37:01Z","closed_at":"2021-10-05T11:37:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3017","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3017","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3017.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3017.patch","merged_at":"2021-10-05T11:37:00Z"},"body":"Minor fix to remove unused args `*p` in `xdirname`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3017\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3017\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3016","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3016\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3016\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3016\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3016","id":1015208654,"node_id":"PR_kwDODunzps4spDlX","number":3016,"title":"Fix Windows paths in LJ Speech 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-04T13:49:37Z","updated_at":"2021-10-04T15:23:05Z","closed_at":"2021-10-04T15:23:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3016","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3016","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3016.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3016.patch","merged_at":"2021-10-04T15:23:04Z"},"body":"Minor fix in LJ Speech dataset for Windows pathname component separator.\r\n\r\nRelated to #1878.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3016\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3016\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3015","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3015\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3015\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3015\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3015","id":1015130845,"node_id":"PR_kwDODunzps4so0GX","number":3015,"title":"Extend support for streaming datasets that use 
glob.glob","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-04T12:42:37Z","updated_at":"2021-10-05T13:46:39Z","closed_at":"2021-10-05T13:46:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3015","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3015","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3015.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3015.patch","merged_at":"2021-10-05T13:46:38Z"},"body":"This PR extends the support in streaming mode for datasets that use `glob`, by patching the function `glob.glob`.\r\n\r\nRelated to #2880, #2876, #2874","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3015\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3015\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3014","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3014\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3014\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3014\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3014","id":1015070751,"node_id":"PR_kwDODunzps4son8A","number":3014,"title":"Fix Windows path in MATH 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-04T11:41:07Z","updated_at":"2021-10-04T12:46:44Z","closed_at":"2021-10-04T12:46:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3014","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3014","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3014.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3014.patch","merged_at":"2021-10-04T12:46:44Z"},"body":"Minor fix in MATH dataset for Windows pathname component separator.\r\n\r\nRelated to #2982.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3014\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3014\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3013","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3013\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3013\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3013\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3013","id":1014960419,"node_id":"I_kwDODunzps48fxEj","number":3013,"title":"Improve 
`get_dataset_infos`?","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-04T09:47:04Z","updated_at":"2021-10-22T09:36:09Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Using the dedicated function `get_dataset_infos` on a dataset that has no dataset-info.json file returns an empty info:\r\n\r\n```\r\n>>> from datasets import get_dataset_infos\r\n>>> get_dataset_infos('wit')\r\n{}\r\n```\r\n\r\nWhile it's totally possible to get it (regenerate it) with:\r\n\r\n```\r\n>>> from datasets import load_dataset_builder\r\n>>> builder = load_dataset_builder('wit')\r\n>>> builder.info\r\nDatasetInfo(description='Wikipedia-based Image Text (WIT) Dataset is a large multimodal multilingual dataset. WIT is composed of a curated set\\n of 37.6 million entity rich image-text examples with 11.5 million unique images across 108 Wikipedia languages. 
Its\\n size enables WIT to be used as a pretraining dataset for multimodal machine learning models.\\n', citation='@article{srinivasan2021wit,\\n title={WIT: Wikipedia-based Image Text Dataset for Multimodal Multilingual Machine Learning},\\n author={Srinivasan, Krishna and Raman, Karthik and Chen, Jiecao and Bendersky, Michael and Najork, Marc},\\n journal={arXiv preprint arXiv:2103.01913},\\n year={2021}\\n}\\n', homepage='https:\/\/github.com\/google-research-datasets\/wit', license='', features={'b64_bytes': Value(dtype='string', id=None), 'embedding': Sequence(feature=Value(dtype='float64', id=None), length=-1, id=None), 'image_url': Value(dtype='string', id=None), 'metadata_url': Value(dtype='string', id=None), 'original_height': Value(dtype='int32', id=None), 'original_width': Value(dtype='int32', id=None), 'mime_type': Value(dtype='string', id=None), 'caption_attribution_description': Value(dtype='string', id=None), 'wit_features': Sequence(feature={'language': Value(dtype='string', id=None), 'page_url': Value(dtype='string', id=None), 'attribution_passes_lang_id': Value(dtype='string', id=None), 'caption_alt_text_description': Value(dtype='string', id=None), 'caption_reference_description': Value(dtype='string', id=None), 'caption_title_and_reference_description': Value(dtype='string', id=None), 'context_page_description': Value(dtype='string', id=None), 'context_section_description': Value(dtype='string', id=None), 'hierarchical_section_title': Value(dtype='string', id=None), 'is_main_image': Value(dtype='string', id=None), 'page_changed_recently': Value(dtype='string', id=None), 'page_title': Value(dtype='string', id=None), 'section_title': Value(dtype='string', id=None)}, length=-1, id=None)}, post_processed=None, supervised_keys=None, task_templates=None, builder_name='wit', config_name='default', version=0.0.0, splits=None, download_checksums=None, download_size=None, post_processing_size=None, dataset_size=None, size_in_bytes=None)\r\n```\r\n\r\nShould we test if info is empty, and in that case regenerate it? 
Or always generate it?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3013\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3013\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3012","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3012\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3012\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3012\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3012","id":1014958931,"node_id":"PR_kwDODunzps4soRTu","number":3012,"title":"Replace item with float in metrics","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-04T09:45:28Z","updated_at":"2021-10-04T11:30:34Z","closed_at":"2021-10-04T11:30:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3012","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3012","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3012.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3012.patch","merged_at":"2021-10-04T11:30:33Z"},"body":"As pointed out by @mariosasko in #3001, calling `float()` instad of `.item()` is faster.\r\n\r\nMoreover, it might avoid potential issues if any of the third-party functions eventually returns a `float` instead of an `np.float64`.\r\n\r\nRelated to #3001.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3012\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3012\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3011","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3011\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3011\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3011\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3011","id":1014935713,"node_id":"I_kwDODunzps48frCh","number":3011,"title":"load_dataset_builder should error if \"name\" does not exist?","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-04T09:20:46Z","updated_at":"2021-10-22T09:36:09Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\nimport datasets as ds\r\nbuilder = ds.load_dataset_builder('sent_comp', name=\"doesnotexist\")\r\nbuilder.info.config_name\r\n```\r\n\r\nreturns\r\n\r\n```\r\n'doesnotexist'\r\n```\r\n\r\nShouldn't it raise an error instead?\r\n\r\nFor this dataset, the only valid values for `name` should be: `\"default\"` or `None` (ie. 
argument not passed)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3011\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3011\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3010","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3010\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3010\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3010\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3010","id":1014918470,"node_id":"I_kwDODunzps48fm1G","number":3010,"title":"Chain filtering is leaking","user":{"login":"DrMatters","id":22641583,"node_id":"MDQ6VXNlcjIyNjQxNTgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22641583?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/DrMatters","html_url":"https:\/\/github.com\/DrMatters","followers_url":"https:\/\/api.github.com\/users\/DrMatters\/followers","following_url":"https:\/\/api.github.com\/users\/DrMatters\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/DrMatters\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/DrMatters\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/DrMatters\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/DrMatters\/orgs","repos_url":"https:\/\/api.github.com\/users\/DrMatters\/repos","events_url":"https:\/\/api.github.com\/users\/DrMatters\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/DrMatters\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-10-04T09:04:55Z","updated_at":"2021-10-05T08:36:08Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAs there's no support for lists within dataset fields, I convert my lists to json-string format. 
However, the bug described is occurring even when the data format is 'string'.\r\nThese samples show that filtering behavior diverges from what's expected when chaining filterings.\r\nOn sample 2 the second filtering leads to \"leaking\" of data that should've been filtered on the first filtering into the results.\r\n\r\n## Steps to reproduce the bug\r\nSample 1:\r\n```python\r\nimport datasets\r\nimport json\r\n\r\nitems = [[1, 2], [3], [4]]\r\njsoned_items = map(json.dumps, [[1, 2], [3], [4]])\r\nds = datasets.Dataset.from_dict({'a': jsoned_items})\r\nprint(list(ds))\r\n# > Prints: [{'a': '[1, 2]'}, {'a': '[3]'}, {'a': '[4]'}] as expected\r\n\r\nfiltered = ds\r\n\r\n# get all lists that are shorter than 2\r\nfiltered = filtered.filter(lambda x: len(json.loads(x['a'])) < 2, load_from_cache_file=False)\r\nprint(list(filtered))\r\n# > Prints: [{'a': '[3]'}, {'a': '[4]'}] as expected\r\n\r\n# get all lists, which have a value bigger than 3 on its zero index\r\nfiltered = filtered.filter(lambda x: json.loads(x['a'])[0] > 3, load_from_cache_file=False)\r\nprint(list(filtered))\r\n# > Should be: [{'a': [4]}]\r\n# > Prints: [{'a': [3]}]\r\n```\r\nSample 2:\r\n```python\r\nimport datasets\r\nimport json\r\n\r\nitems = [[1, 2], [3], [4]]\r\njsoned_items = map(json.dumps, [[1, 2], [3], [4]])\r\nds = datasets.Dataset.from_dict({'a': jsoned_items})\r\nprint(list(ds))\r\n# > Prints: [{'a': '[1, 2]'}, {'a': '[3]'}, {'a': '[4]'}]\r\n\r\nfiltered = ds\r\n\r\n# get all lists, which have a value bigger than 3 on its zero index\r\nfiltered = filtered.filter(lambda x: json.loads(x['a'])[0] > 3, load_from_cache_file=False)\r\nprint(list(filtered))\r\n# > Prints: [{'a': '[4]'}] as expected\r\n\r\n# get all lists that are shorter than 2\r\nfiltered = filtered.filter(lambda x: len(json.loads(x['a'])) < 2, load_from_cache_file=False)\r\nprint(list(filtered))\r\n# > Prints: [{'a': '[1, 2]'}]\r\n# > Should be: [{'a': '[4]'}] (remain intact)\r\n```\r\n\r\n## Expected results\r\nExpected and actual results are attached to the code snippets.\r\n\r\n## Actual results\r\nExpected and actual results are attached to the code snippets.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Windows-10-10.0.19042-SP0\r\n- Python version: 3.9.7\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3010\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3010\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3009","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3009\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3009\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3009\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3009","id":1014868235,"node_id":"PR_kwDODunzps4sn_YG","number":3009,"title":"Fix Windows paths in SUPERB benchmark 
datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-04T08:13:49Z","updated_at":"2021-10-04T13:43:25Z","closed_at":"2021-10-04T13:43:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3009","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3009","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3009.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3009.patch","merged_at":"2021-10-04T13:43:24Z"},"body":"Minor fix in SUPERB benchmark datasets for Windows pathname component separator.\r\n\r\nRelated to #2884, #2783 and #2619.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3009\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3009\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3008","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3008\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3008\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3008\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3008","id":1014849163,"node_id":"PR_kwDODunzps4sn7iU","number":3008,"title":"Fix precision\/recall metrics with None 
average","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-04T07:54:15Z","updated_at":"2021-10-04T09:29:37Z","closed_at":"2021-10-04T09:29:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3008","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3008","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3008.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3008.patch","merged_at":"2021-10-04T09:29:36Z"},"body":"Related to issue #2979 and PR #2992.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3008\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3008\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3007","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3007\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3007\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3007\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3007","id":1014775450,"node_id":"PR_kwDODunzps4sns-n","number":3007,"title":"Correct a 
typo","user":{"login":"Yann21","id":35955430,"node_id":"MDQ6VXNlcjM1OTU1NDMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35955430?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Yann21","html_url":"https:\/\/github.com\/Yann21","followers_url":"https:\/\/api.github.com\/users\/Yann21\/followers","following_url":"https:\/\/api.github.com\/users\/Yann21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Yann21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Yann21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Yann21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Yann21\/orgs","repos_url":"https:\/\/api.github.com\/users\/Yann21\/repos","events_url":"https:\/\/api.github.com\/users\/Yann21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Yann21\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-04T06:15:47Z","updated_at":"2021-10-04T09:27:57Z","closed_at":"2021-10-04T09:27:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3007","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3007","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3007.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3007.patch","merged_at":"2021-10-04T09:27:57Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3007\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3007\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3006","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3006\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3006\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3006\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3006","id":1014770821,"node_id":"PR_kwDODunzps4snsBm","number":3006,"title":"Fix Windows paths in CommonLanguage 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-04T06:08:58Z","updated_at":"2021-10-04T09:07:58Z","closed_at":"2021-10-04T09:07:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3006","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3006","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3006.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3006.patch","merged_at":"2021-10-04T09:07:58Z"},"body":"Minor fix in CommonLanguage dataset for Windows pathname component separator.\r\n\r\nRelated to #2989.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3006\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3006\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3005","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3005\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3005\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3005\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3005","id":1014615420,"node_id":"I_kwDODunzps48ec18","number":3005,"title":"DatasetDict.filter and Dataset.filter crashes with any \"fn_kwargs\" 
argument","user":{"login":"DrMatters","id":22641583,"node_id":"MDQ6VXNlcjIyNjQxNTgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22641583?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/DrMatters","html_url":"https:\/\/github.com\/DrMatters","followers_url":"https:\/\/api.github.com\/users\/DrMatters\/followers","following_url":"https:\/\/api.github.com\/users\/DrMatters\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/DrMatters\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/DrMatters\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/DrMatters\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/DrMatters\/orgs","repos_url":"https:\/\/api.github.com\/users\/DrMatters\/repos","events_url":"https:\/\/api.github.com\/users\/DrMatters\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/DrMatters\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-10-04T00:49:29Z","updated_at":"2021-10-11T10:18:01Z","closed_at":"2021-10-04T08:46:13Z","author_association":"NONE","activ
e_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe \".filter\" method of DatasetDict or Dataset objects fails when passing any \"fn_kwargs\" argument\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\n\r\nexample_dataset = datasets.Dataset.from_dict({\"a\": {1, 2, 3, 4}})\r\n\r\ndef filter_value(example, value):\r\n return example['a'] == value\r\n\r\nfiltered = example_dataset.filter(filter_value, fn_kwargs={'value': 3})\r\n```\r\n\r\n## Expected results\r\n`filtered` is a dataset containing {\"a\": {3}}\r\n\r\n## Actual results\r\n\r\n> Traceback (most recent call last):\r\n> File \"C:\\Users\\qsemi\\Documents\\git\\nlp_experiments\\gpt_celebrity\\src\\test_faulty_filter.py\", line 8, in \r\n> filtered = example_dataset.filter(filter_value, fn_kwargs={'value': 3})\r\n> File \"C:\\Users\\qsemi\\miniconda3\\envs\\main\\lib\\site-packages\\datasets\\arrow_dataset.py\", line 185, in wrapper\r\n> out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n> File \"C:\\Users\\qsemi\\miniconda3\\envs\\main\\lib\\site-packages\\datasets\\fingerprint.py\", line 398, in wrapper\r\n> out = func(self, *args, **kwargs)\r\n> File \"C:\\Users\\qsemi\\miniconda3\\envs\\main\\lib\\site-packages\\datasets\\arrow_dataset.py\", line 2169, in filter\r\n> indices = self.map(\r\n> File \"C:\\Users\\qsemi\\miniconda3\\envs\\main\\lib\\site-packages\\datasets\\arrow_dataset.py\", line 1686, in map\r\n> return self._map_single(\r\n> File \"C:\\Users\\qsemi\\miniconda3\\envs\\main\\lib\\site-packages\\datasets\\arrow_dataset.py\", line 185, in wrapper\r\n> out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n> File \"C:\\Users\\qsemi\\miniconda3\\envs\\main\\lib\\site-packages\\datasets\\fingerprint.py\", line 398, in wrapper\r\n> out = func(self, *args, **kwargs)\r\n> File \"C:\\Users\\qsemi\\miniconda3\\envs\\main\\lib\\site-packages\\datasets\\arrow_dataset.py\", line 2048, in _map_single\r\n> batch = apply_function_on_filtered_inputs(\r\n> File \"C:\\Users\\qsemi\\miniconda3\\envs\\main\\lib\\site-packages\\datasets\\arrow_dataset.py\", line 1939, in apply_function_on_filtered_inputs\r\n> function(*fn_args, effective_indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n> TypeError: get_indices_from_mask_function() got an unexpected keyword argument 'value'\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Windows-10-10.0.19042-SP0\r\n- Python version: 3.9.7\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3005\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3005\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3004","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3004\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3004\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3004\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3004","id":1014336617,"node_id":"PR_kwDODunzps4smfPF","number":3004,"title":"LexGLUE: A Benchmark Dataset for Legal Language Understanding in 
English.","user":{"login":"iliaschalkidis","id":1626984,"node_id":"MDQ6VXNlcjE2MjY5ODQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1626984?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iliaschalkidis","html_url":"https:\/\/github.com\/iliaschalkidis","followers_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/followers","following_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/orgs","repos_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/repos","events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-03T10:03:25Z","updated_at":"2021-10-13T13:37:02Z","closed_at":"2021-10-13T13:37:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3004","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3004","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3004.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3004.patch","merged_at":"2021-10-13T13:37:01Z"},"body":"Inspired by the recent widespread use of the GLUE multi-task benchmark NLP dataset (Wang et al., 2018), the subsequent more difficult SuperGLUE (Wang et al., 2019), other previous multi-task NLP benchmarks (Conneau and Kiela, 2018; McCann et al., 2018), and similar initiatives in other domains (Peng et al., 2019), we introduce the Legal General Language Understanding Evaluation (LexGLUE) benchmark, a benchmark dataset to evaluate the performance of NLP methods in legal tasks. LexGLUE is based on seven existing legal NLP datasets, selected using criteria largely from SuperGLUE.\r\n\r\nAs in GLUE and SuperGLUE (Wang et al., 2019b,a), one of our goals is to push towards generic (or \u2018foundation\u2019) models that can cope with multiple NLP tasks, in our case legal NLP tasks possibly with limited task-specific fine-tuning. Another goal is to provide a convenient and informative entry point for NLP researchers and practitioners wishing to explore or develop methods for legalNLP. 
Having these goals in mind, the datasets we include in LexGLUE and the tasks they address have been simplified in several ways to make it easier for newcomers and generic models to address all tasks.\r\n\r\nLexGLUE benchmark is accompanied by experimental infrastructure that relies on Hugging Face Transformers library and resides at: https:\/\/github.com\/coastalcph\/lex-glue.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3004\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3004\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3003","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3003\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3003\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3003\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3003","id":1014137933,"node_id":"PR_kwDODunzps4smExP","number":3003,"title":"common_language: Fix license in README.md","user":{"login":"jimregan","id":227350,"node_id":"MDQ6VXNlcjIyNzM1MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/227350?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jimregan","html_url":"https:\/\/github.com\/jimregan","followers_url":"https:\/\/api.github.com\/users\/jimregan\/followers","following_url":"https:\/\/api.github.com\/users\/jimregan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jimregan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jimregan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jimregan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jimregan\/orgs","repos_url":"https:\/\/api.github.com\/users\/jimregan\/repos","events_url":"https:\/\/api.github.com\/users\/jimregan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jimregan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-02T18:47:37Z","updated_at":"2021-10-04T09:27:01Z","closed_at":"2021-10-04T09:27:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3003","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3003","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3003.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3003.patch","merged_at":"2021-10-04T09:27:01Z"},"body":"...it's correct elsewhere","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3003\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3003\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3002","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3002\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3002\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3002\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3002","id":1014120524,"node_id":"PR_kwDODunzps4smCNO","number":3002,"title":"Remove a reference to the open Arrow file when deleting a TF dataset created with to_tf_dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-02T17:44:09Z","updated_at":"2021-10-13T11:48:00Z","closed_at":"2021-10-13T09:03:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3002","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3002","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3002.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3002.patch","merged_at":"2021-10-13T09:03:23Z"},"body":"This [comment](https:\/\/github.com\/huggingface\/datasets\/issues\/2934#issuecomment-922970919) explains the issue. 
This PR fixes that with a `weakref` callback, and additionally:\r\n* renames `TensorflowDatasetMixIn` to `TensorflowDatasetMixin` for consistency\r\n* correctly indents `TensorflowDatasetMixin`'s docstring\r\n* replaces `tf.data.AUTOTUNE` with `tf.data.experimental.AUTOTUNE` (we support TF>=2.2 according to the [setup.py](https:\/\/github.com\/huggingface\/datasets\/blob\/fc46bba66ba4f432cc10501c16a677112e13984c\/setup.py#L188) and `AUTOTUNE` has been moved to the experimental part of `tf.data` in 1.X if I'm not mistaken)\r\n\r\nFixes #2934","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3002\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3002\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3001","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3001\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3001\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3001\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3001","id":1014024982,"node_id":"PR_kwDODunzps4sl0BY","number":3001,"title":"Fix cast to Python scalar in Matthews Correlation metric","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-02T11:44:59Z","updated_at":"2021-10-04T09:54:04Z","closed_at":"2021-10-04T09:26:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3001","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3001","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3001.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3001.patch","merged_at":"2021-10-04T09:26:12Z"},"body":"This PR is motivated by issue #2964.\r\n\r\nThe Matthews Correlation metric relies on sklearn's `matthews_corrcoef` function to compute the result. This function returns either `float` or `np.float64` (see the [source](https:\/\/github.com\/scikit-learn\/scikit-learn\/blob\/844b4be24d20fc42cc13b957374c718956a0db39\/sklearn\/metrics\/_classification.py#L906-L909)). 
Obviously, calling `.item()` on the float value will fail, so I'm fixing this with the built-in `float()` function, which covers both cases. Surprisingly, on my machine, casting `np.float64` to a Python scalar with `float()` is even faster than with the `.item()` method.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3001\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3001\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3000","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3000\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3000\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3000\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3000","id":1013613219,"node_id":"PR_kwDODunzps4skusL","number":3000,"title":"Fix json loader when conversion not implemented","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-10-01T17:47:22Z","updated_at":"2021-10-01T18:05:00Z","closed_at":"2021-10-01T17:54:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/3000","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3000","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3000.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/3000.patch","merged_at":"2021-10-01T17:54:23Z"},"body":"Sometimes the arrow json parser fails if the `block_size` is too small and returns an `ArrowNotImplementedError: JSON conversion to struct...` error.\r\n\r\nBy increasing the block size it makes it work again.\r\n\r\nHopefully it should help with https:\/\/github.com\/huggingface\/datasets\/issues\/2799\r\n\r\nI tried with the file mentioned in the issue and it worked for me\r\ncc @lewtun can you try again from this branch 
?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3000\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3000\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2999","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2999\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2999\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2999\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2999","id":1013536933,"node_id":"PR_kwDODunzps4skgCm","number":2999,"title":"Set trivia_qa writer batch size","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-01T16:23:26Z","updated_at":"2021-10-01T16:34:55Z","closed_at":"2021-10-01T16:34:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2999","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2999","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2999.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2999.patch","merged_at":"2021-10-01T16:34:55Z"},"body":"Save some RAM when generating trivia_qa","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2999\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2999\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2998","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2998\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2998\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2998\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2998","id":1013372871,"node_id":"I_kwDODunzps48ZtfH","number":2998,"title":"cannot shuffle dataset loaded from 
disk","user":{"login":"pya25","id":54274249,"node_id":"MDQ6VXNlcjU0Mjc0MjQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/54274249?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pya25","html_url":"https:\/\/github.com\/pya25","followers_url":"https:\/\/api.github.com\/users\/pya25\/followers","following_url":"https:\/\/api.github.com\/users\/pya25\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pya25\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pya25\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pya25\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pya25\/orgs","repos_url":"https:\/\/api.github.com\/users\/pya25\/repos","events_url":"https:\/\/api.github.com\/users\/pya25\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pya25\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-01T13:49:52Z","updated_at":"2021-10-01T13:49:52Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\ndataset loaded from disk cannot be shuffled. \r\n\r\n## Steps to reproduce the bug\r\n```\r\nmy_dataset = load_from_disk('s3:\/\/my_file\/validate', fs=s3)\r\nsample = my_dataset.select(range(100)).shuffle(seed=1234)\r\n```\r\n\r\n## Actual results\r\n```\r\nsample = my_dataset .select(range(100)).shuffle(seed=1234)\r\n File \"\/home\/ubuntu\/anaconda3\/envs\/pytorch_p37\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 185, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/home\/ubuntu\/anaconda3\/envs\/pytorch_p37\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 398, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/home\/ubuntu\/anaconda3\/envs\/pytorch_p37\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 2494, in shuffle\r\n new_fingerprint=new_fingerprint,\r\n File \"\/home\/ubuntu\/anaconda3\/envs\/pytorch_p37\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 185, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/home\/ubuntu\/anaconda3\/envs\/pytorch_p37\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 398, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/home\/ubuntu\/anaconda3\/envs\/pytorch_p37\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 2303, in select\r\n tmp_file = tempfile.NamedTemporaryFile(\"wb\", dir=os.path.dirname(indices_cache_file_name), delete=False)\r\n File \"\/home\/ubuntu\/anaconda3\/envs\/pytorch_p37\/lib\/python3.7\/tempfile.py\", line 547, in NamedTemporaryFile\r\n (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags, output_type)\r\n File \"\/home\/ubuntu\/anaconda3\/envs\/pytorch_p37\/lib\/python3.7\/tempfile.py\", line 258, in _mkstemp_inner\r\n fd = _os.open(file, flags, 0o600)\r\nFileNotFoundError: [Errno 2] No such file or directory: '\/tmp\/tmpnnu5uhnx\/my_file\/validate\/tmpy76d70g4'\r\n```\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n\r\n- Python version: 3.7\r\n- 
PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2998\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2998\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2997","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2997\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2997\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2997\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2997","id":1013270069,"node_id":"I_kwDODunzps48ZUY1","number":2997,"title":"Dataset has incorrect labels","user":{"login":"marshmellow77","id":63367770,"node_id":"MDQ6VXNlcjYzMzY3Nzcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/63367770?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/marshmellow77","html_url":"https:\/\/github.com\/marshmellow77","followers_url":"https:\/\/api.github.com\/users\/marshmellow77\/followers","following_url":"https:\/\/api.github.com\/users\/marshmellow77\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/marshmellow77\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/marshmellow77\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/marshmellow77\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/marshmellow77\/orgs","repos_url":"https:\/\/api.github.com\/users\/marshmellow77\/repos","events_url":"https:\/\/api.github.com\/users\/marshmellow77\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/marshmellow77\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-10-01T12:09:06Z","updated_at":"2021-10-01T15:32:00Z","closed_at":"2021-10-01T13:54:34Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The dataset https:\/\/huggingface.co\/datasets\/turkish_product_reviews has incorrect labels - all reviews are labelled with \"1\" (positive sentiment). None of the reviews is labelled with \"0\". 
See screenshot attached:\r\n\r\n![Capture](https:\/\/user-images.githubusercontent.com\/63367770\/135617428-14ce0b27-5208-4e66-a3ee-71542e3257b4.PNG)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2997\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2997\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2996","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2996\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2996\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2996\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2996","id":1013266373,"node_id":"PR_kwDODunzps4sjrP6","number":2996,"title":"Remove all query parameters when extracting protocol","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-10-01T12:05:34Z","updated_at":"2021-10-04T08:48:13Z","closed_at":"2021-10-04T08:48:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2996","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2996","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2996.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2996.patch","merged_at":"2021-10-04T08:48:13Z"},"body":"Fix `_get_extraction_protocol` to remove all query parameters, like `?raw=true`, `?dl=1`,...","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2996\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2996\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2995","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2995\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2995\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2995\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2995","id":1013143868,"node_id":"PR_kwDODunzps4sjThd","number":2995,"title":"Fix trivia_qa unfiltered","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-10-01T09:53:43Z","updated_at":"2021-10-01T10:04:11Z","closed_at":"2021-10-01T10:04:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2995","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2995","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2995.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2995.patch","merged_at":"2021-10-01T10:04:10Z"},"body":"Fix https:\/\/github.com\/huggingface\/datasets\/issues\/2993","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2995\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2995\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2994","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2994\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2994\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2994\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2994","id":1013000475,"node_id":"PR_kwDODunzps4si4I2","number":2994,"title":"Fix loading compressed CSV without 
streaming","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-10-01T07:28:59Z","updated_at":"2021-10-01T15:53:16Z","closed_at":"2021-10-01T15:53:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2994","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2994","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2994.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2994.patch","merged_at":"2021-10-01T15:53:15Z"},"body":"When implementing support to stream CSV files (https:\/\/github.com\/huggingface\/datasets\/commit\/ad489d4597381fc2d12c77841642cbeaecf7a2e0#diff-6f60f8d0552b75be8b3bfd09994480fd60dcd4e7eb08d02f721218c3acdd2782), a regression was introduced preventing loading compressed CSV files in non-streaming mode.\r\n\r\nThis PR fixes it, allowing loading compressed\/uncompressed CSV files in streaming\/non-streaming mode.\r\n\r\nFix #2977.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2994\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2994\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2993","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2993\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2993\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2993\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2993","id":1012702665,"node_id":"I_kwDODunzps48XJ3J","number":2993,"title":"Can't download 
`trivia_qa\/unfiltered`","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-09-30T23:00:18Z","updated_at":"2021-10-01T19:07:23Z","closed_at":"2021-10-01T19:07:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nFor some reason, I can't download `trivia_qa\/unfilted`. 
A file seems to be missing... I am able to see it fine though the viewer tough...\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> load_dataset(\"trivia_qa\", \"unfiltered\")\r\nDownloading and preparing dataset trivia_qa\/unfiltered (download: 3.07 GiB, generated: 27.23 GiB, post-processed: Unknown size, total: 30.30 GiB) to \/gpfsscratch\/rech\/six\/commun\/datasets\/trivia_qa\/unfiltered\/1.1.0\/9977a5d6f72acfd92f587de052403e8138b43bb0d1ce595016c3baf7e14deba6...\r\nTraceback (most recent call last):\r\n File \"\/gpfswork\/rech\/six\/commun\/modules\/datasets_modules\/datasets\/trivia_qa\/9977a5d6f72acfd92f587de052403e8138b43bb0d1ce595016c3baf7e14deba6\/trivia_qa.py\", line 251, in _add_context\r\n with open(os.path.join(file_dir, fname), encoding=\"utf-8\") as f:\r\nFileNotFoundError: [Errno 2] No such file or directory: '\/gpfsscratch\/rech\/six\/commun\/datasets\/downloads\/extracted\/9fcb7eddc6afd46fd074af3c5128931dfe4b548f933c925a23847faf4c1995ad\/evidence\/wikipedia\/Peanuts.txt'\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/gpfswork\/rech\/six\/commun\/conda\/victor\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 852, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/gpfswork\/rech\/six\/commun\/conda\/victor\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 616, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/gpfswork\/rech\/six\/commun\/conda\/victor\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 693, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/gpfswork\/rech\/six\/commun\/conda\/victor\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 1107, in _prepare_split\r\n disable=bool(logging.get_verbosity() == logging.NOTSET),\r\n File \"\/gpfswork\/rech\/six\/commun\/conda\/victor\/lib\/python3.7\/site-packages\/tqdm\/std.py\", line 1133, in __iter__\r\n for obj in iterable:\r\n File \"\/gpfswork\/rech\/six\/commun\/modules\/datasets_modules\/datasets\/trivia_qa\/9977a5d6f72acfd92f587de052403e8138b43bb0d1ce595016c3baf7e14deba6\/trivia_qa.py\", line 303, in _generate_examples\r\n example = parse_example(article)\r\n File \"\/gpfswork\/rech\/six\/commun\/modules\/datasets_modules\/datasets\/trivia_qa\/9977a5d6f72acfd92f587de052403e8138b43bb0d1ce595016c3baf7e14deba6\/trivia_qa.py\", line 274, in parse_example\r\n _add_context(article.get(\"EntityPages\", []), \"WikiContext\", wiki_dir),\r\n File \"\/gpfswork\/rech\/six\/commun\/modules\/datasets_modules\/datasets\/trivia_qa\/9977a5d6f72acfd92f587de052403e8138b43bb0d1ce595016c3baf7e14deba6\/trivia_qa.py\", line 253, in _add_context\r\n except (IOError, datasets.Value(\"errors\").NotFoundError):\r\n File \"\", line 5, in __init__\r\n File \"\/gpfswork\/rech\/six\/commun\/conda\/victor\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 265, in __post_init__\r\n self.pa_type = string_to_arrow(self.dtype)\r\n File \"\/gpfswork\/rech\/six\/commun\/conda\/victor\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 134, in string_to_arrow\r\n f\"Neither {datasets_dtype} nor {datasets_dtype + '_'} seems to be a pyarrow data type. \"\r\nValueError: Neither errors nor errors_ seems to be a pyarrow data type. 
Please make sure to use a correct data type, see: https:\/\/arrow.apache.org\/docs\/python\/api\/datatypes.html#factory-functions\r\n```\r\n\r\n## Expected results\r\nI am able to load another subset (`rc`), but unable to load.\r\nI am not sure why the try\/except doesn't catch it...\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/9675a5a1e7b99a86f9c250f6ea5fa5d1e6d5cc7d\/datasets\/trivia_qa\/trivia_qa.py#L253\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: Linux-4.18.0-147.51.2.el8_1.x86_64-x86_64-with-redhat-8.1-Ootpa\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2993\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2993\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2992","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2992\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2992\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2992\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2992","id":1012325594,"node_id":"PR_kwDODunzps4sg4ZP","number":2992,"title":"Fix f1 metric with None average","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-30T15:31:57Z","updated_at":"2021-10-01T14:17:39Z","closed_at":"2021-10-01T14:17:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2992","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2992","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2992.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2992.patch","merged_at":"2021-10-01T14:17:38Z"},"body":"Fix 
#2979.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2992\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2992\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2991","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2991\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2991\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2991\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2991","id":1012174823,"node_id":"I_kwDODunzps48VI_n","number":2991,"title":"add docmentation for the `Unix style pattern` matching feature that can be leverage for `data_files` into `load_dataset`","user":{"login":"SaulLu","id":55560583,"node_id":"MDQ6VXNlcjU1NTYwNTgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55560583?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SaulLu","html_url":"https:\/\/github.com\/SaulLu","followers_url":"https:\/\/api.github.com\/users\/SaulLu\/followers","following_url":"https:\/\/api.github.com\/users\/SaulLu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SaulLu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SaulLu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SaulLu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SaulLu\/orgs","repos_url":"https:\/\/api.github.com\/users\/SaulLu\/repos","events_url":"https:\/\/api.github.com\/users\/SaulLu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SaulLu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-30T13:22:01Z","updated_at":"2021-09-30T13:22:01Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Unless I'm mistaken, it seems that in the new documentation it is no longer mentioned that you can use Unix style pattern matching in the `data_files` argument of the `load_dataset` method.\r\n\r\nThis feature was mentioned [here](https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#from-a-community-dataset-on-the-hugging-face-hub) in the previous documentation.\r\n\r\nI'd love to hear your opinion @lhoestq , @albertvillanova and @stevhliu ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2991\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2991\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2990","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2990\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2990\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2990\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2990","id":1012097418,"node_id":"PR_kwDODunzps4sgLt5","number":2990,"title":"Make Dataset.map accept list of np.array","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-30T12:08:54Z","updated_at":"2021-10-01T13:57:46Z","closed_at":"2021-10-01T13:57:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2990","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2990","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2990.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2990.patch","merged_at":"2021-10-01T13:57:45Z"},"body":"Fix #2987.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2990\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2990\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2989","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2989\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2989\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2989\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2989","id":1011220375,"node_id":"PR_kwDODunzps4sdlt1","number":2989,"title":"Add 
CommonLanguage","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-29T17:21:30Z","updated_at":"2021-10-01T17:36:39Z","closed_at":"2021-10-01T17:00:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2989","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2989","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2989.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2989.patch","merged_at":"2021-10-01T17:00:03Z"},"body":"This PR adds the Common Language dataset (https:\/\/zenodo.org\/record\/5036977)\r\nThe dataset is intended for language-identification speech classifiers and is already used by models on the Hub:\r\n* https:\/\/huggingface.co\/speechbrain\/lang-id-commonlanguage_ecapa\r\n* https:\/\/huggingface.co\/anton-l\/wav2vec2-base-langid\r\n\r\ncc @patrickvonplaten ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2989\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2989\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2988","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2988\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2988\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2988\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2988","id":1011148017,"node_id":"I_kwDODunzps48ROTx","number":2988,"title":"IndexError: Invalid key: 14 is out of bounds for size 
0","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-09-29T16:04:24Z","updated_at":"2022-01-05T06:38:06Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\nHi. I am trying to implement stochastic weighted averaging optimizer with transformer library as described here https:\/\/pytorch.org\/blog\/pytorch-1.6-now-includes-stochastic-weight-averaging\/ , for this I am using a run_clm.py codes which is working fine before adding SWA optimizer, the moment I modify the model with `swa_model = AveragedModel(model)` in this script, I am getting the below error, since I am NOT touching the dataloader part, I am confused why this is occurring, I very much appreciate your opinion on this @lhoestq \r\n\r\n## Steps to reproduce the bug\r\n```\r\nTraceback (most recent call last):\r\n File \"run_clm.py\", line 723, in \r\n main()\r\n File \"run_clm.py\", line 669, in main\r\n train_result = trainer.train(resume_from_checkpoint=checkpoint)\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/transformers\/trainer.py\", line 1258, in train\r\n for step, inputs in enumerate(epoch_iterator):\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/torch\/utils\/data\/dataloader.py\", line 435, in __next__\r\n data = self._next_data()\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/torch\/utils\/data\/dataloader.py\", line 475, in _next_data\r\n data = self._dataset_fetcher.fetch(index) # may raise StopIteration\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/torch\/utils\/data\/_utils\/fetch.py\", line 44, in fetch\r\n data = [self.dataset[idx] for idx in possibly_batched_index]\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/torch\/utils\/data\/_utils\/fetch.py\", line 44, in \r\n data = [self.dataset[idx] for idx in possibly_batched_index]\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1530, in __getitem__\r\n 
format_kwargs=self._format_kwargs,\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1517, in _getitem\r\n pa_subtable = query_table(self._data, key, indices=self._indices if self._indices is not None else None)\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/datasets\/formatting\/formatting.py\", line 368, in query_table\r\n _check_valid_index_key(key, size)\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/datasets\/formatting\/formatting.py\", line 311, in _check_valid_index_key\r\n raise IndexError(f\"Invalid key: {key} is out of bounds for size {size}\")\r\nIndexError: Invalid key: 14 is out of bounds for size 0\r\n```\r\n\r\n\r\n\r\n## Expected results\r\nnot getting the index error\r\n\r\n## Actual results\r\nPlease see the above\r\n\r\n## Environment info\r\n\r\n- `datasets` version: datasets 1.12.1 \r\n- Platform: linux\r\n- Python version: 3.7.11 \r\n- PyArrow version: 5.0.0 \r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2988\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2988\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2987","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2987\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2987\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2987\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2987","id":1011026141,"node_id":"I_kwDODunzps48Qwjd","number":2987,"title":"ArrowInvalid: Can only convert 1-dimensional array values","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-29T14:18:52Z","updated_at":"2021-10-01T13:57:45Z","closed_at":"2021-10-01T13:57:45Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nFor the ViT and LayoutLMv2 demo notebooks in my [Transformers-Tutorials repo](https:\/\/github.com\/NielsRogge\/Transformers-Tutorials), people reported an ArrowInvalid issue after applying the following function to a Dataset:\r\n\r\n```\r\ndef preprocess_data(examples):\r\n images = [Image.open(path).convert(\"RGB\") for path in examples['image_path']]\r\n words = examples['words']\r\n boxes = examples['bboxes']\r\n word_labels = examples['ner_tags']\r\n \r\n encoded_inputs = processor(images, words, boxes=boxes, word_labels=word_labels,\r\n padding=\"max_length\", truncation=True)\r\n \r\n return encoded_inputs \r\n```\r\n\r\n```\r\nFull trace:\r\n\r\n---------------------------------------------------------------------------\r\nArrowInvalid Traceback (most recent call last)\r\n in ()\r\n 27 \r\n 28 train_dataset = datasets['train'].map(preprocess_data, batched=True, remove_columns=datasets['train'].column_names,\r\n---> 29 features=features)\r\n 30 test_dataset = datasets['test'].map(preprocess_data, batched=True, remove_columns=datasets['test'].column_names,\r\n 31 features=features)\r\n\r\n13 
frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint, desc)\r\n 1701 new_fingerprint=new_fingerprint,\r\n 1702 disable_tqdm=disable_tqdm,\r\n-> 1703 desc=desc,\r\n 1704 )\r\n 1705 else:\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 183 }\r\n 184 # apply actual function\r\n--> 185 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 186 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 187 # re-apply format to the output\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 396 # Call actual function\r\n 397 \r\n--> 398 out = func(self, *args, **kwargs)\r\n 399 \r\n 400 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, disable_tqdm, desc, cache_only)\r\n 2063 writer.write_table(batch)\r\n 2064 else:\r\n-> 2065 writer.write_batch(batch)\r\n 2066 if update_data and writer is not None:\r\n 2067 writer.finalize() # close_stream=bool(buf_writer is None)) # We only close if we are writing in a file\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_writer.py in write_batch(self, batch_examples, writer_batch_size)\r\n 409 typed_sequence = OptimizedTypedSequence(batch_examples[col], type=col_type, try_type=col_try_type, col=col)\r\n 410 typed_sequence_examples[col] = typed_sequence\r\n--> 411 pa_table = pa.Table.from_pydict(typed_sequence_examples)\r\n 412 self.write_table(pa_table, writer_batch_size)\r\n 413 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.from_pydict()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/array.pxi in pyarrow.lib.asarray()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/array.pxi in pyarrow.lib.array()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/array.pxi in pyarrow.lib._handle_arrow_array_protocol()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_writer.py in __arrow_array__(self, type)\r\n 106 storage = numpy_to_pyarrow_listarray(self.data, type=type.value_type)\r\n 107 else:\r\n--> 108 storage = pa.array(self.data, type.storage_dtype)\r\n 109 out = pa.ExtensionArray.from_storage(type, storage)\r\n 110 elif isinstance(self.data, np.ndarray):\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/array.pxi in pyarrow.lib.array()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/array.pxi in pyarrow.lib._sequence_to_array()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowInvalid: Can only convert 1-dimensional array values\r\n```\r\nIt can be fixed by adding the following line:\r\n\r\n```diff\r\ndef 
preprocess_data(examples):\r\n images = [Image.open(path).convert(\"RGB\") for path in examples['image_path']]\r\n words = examples['words']\r\n boxes = examples['bboxes']\r\n word_labels = examples['ner_tags']\r\n \r\n encoded_inputs = processor(images, words, boxes=boxes, word_labels=word_labels,\r\n padding=\"max_length\", truncation=True)\r\n+ encoded_inputs[\"image\"] = np.array(encoded_inputs[\"image\"])\r\n \r\n return encoded_inputs \r\n```\r\n\r\nHowever, would be great if this can be fixed within Datasets itself.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2987\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2987\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2986","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2986\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2986\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2986\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2986","id":1010792783,"node_id":"PR_kwDODunzps4scSHR","number":2986,"title":"Refac module factory + avoid etag requests for hub datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-09-29T10:42:00Z","updated_at":"2021-10-11T11:05:53Z","closed_at":"2021-10-11T11:05:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2986","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2986","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2986.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2986.patch","merged_at":"2021-10-11T11:05:51Z"},"body":"## Refactor the module factory\r\n\r\nWhen trying to extend the `data_files` logic to avoid doing unnecessary ETag requests, I noticed that the module preparation mechanism needed a refactor:\r\n- the function was 600 lines long\r\n- it was not readable\r\n- it contained many different cases that made it complex to maintain\r\n- it was hard to properly test it\r\n- it was hard to extend without breaking anything\r\n\r\nThe module preparation mechanism 
is in charge of taking the name of a dataset or a metric given by the user (ex: \"squad\", \"accuracy\", \"lhoestq\/test\", \"path\/to\/my\/script.py\", \"path\/to\/my\/data\/directory\", \"json\", \"csv\") and return a module (possibly downloaded from the Hub) that contains the dataset builder or the metric class to use.\r\n\r\n### Implementation details\r\n\r\nI decided to separate all these use cases into different dataset\/metric module factories.\r\n\r\nFirst, the metric module factories:\r\n- **CanonicalMetricModuleFactory**: \"accuracy\", \"rouge\", ...\r\n- **LocalMetricModuleFactory**: \"path\/to\/my\/metric.py\"\r\n\r\nThen, the dataset module factories:\r\n- **CanonicalDatasetModuleFactory**: \"squad\", \"glue\", ...\r\n- **CommunityDatasetModuleFactoryWithScript**: \"lhoestq\/test\"\r\n- **CommunityDatasetModuleFactoryWithoutScript**: \"lhoestq\/demo1\"\r\n- **PackagedDatasetModuleFactory**: \"json\", \"csv\", ...\r\n- **LocalDatasetModuleFactoryWithScript**: \"path\/to\/my\/script.py\"\r\n- **LocalDatasetModuleFactoryWithoutScript**: \"path\/to\/my\/data\/directory\"\r\n\r\nAnd finally, additional factories when users have no internet:\r\n- **CachedDatasetModuleFactory**\r\n- **CachedMetricModuleFactory**\r\n\r\n### Breaking changes\r\n\r\nOne thing is that I still don't know at what extent we want to keep backward compatibility for `prepare_module`. For now I just kept it (except I removed two parameters) just in case, but it's not used anywhere anymore.\r\n\r\n## Avoid etag requests for hub datasets\r\n\r\nTo do this I added a class `DataFilesDict` that can be hashed to define the cache directory of the dataset.\r\nIt contains the usual data files formatted as `{\"train\": [\"train.txt\"]}` for example.\r\nBut each list of file is a `DataFilesList` that also has a `origin_metadata` attribute that contains metadata about the origin of each file:\r\n- for URLs: it stores the ETags of the files\r\n- for local files: it stores the last modification data\r\n- for files from a Hugging Face repository on the Hub: it stores the pattern (`*`, `*.csv`, \"train.txt\", etc.) 
and the commit sha of the repository (so there're no ETag requests !)\r\n\r\nThis way if any file changes, the hash of the `DataFilesDict` changes too !\r\n\r\nYou can instantiate a `DataFilesDict` by using patterns for local\/remote files or files in a HF repository:\r\n- for local\/remote files: `DataFilesDict.from_local_or_remote(patterns)`\r\n- for files in a HF repository: `DataFilesDict.from_hf_repo(patterns, dataset_info)`\r\n\r\nFix #2859 \r\n\r\n## TODO\r\n\r\nFix the latest test:\r\n- [x] fix the call to dataset_info in offline mode (related to https:\/\/github.com\/huggingface\/huggingface_hub\/issues\/372)\r\n\r\nAdd some more tests:\r\n- [x] test all the factories\r\n- [x] test the new data files logic\r\n\r\nOther:\r\n- [x] docstrings\r\n- [x] comments","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2986\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":2,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2986\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2985","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2985\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2985\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2985\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2985","id":1010500433,"node_id":"PR_kwDODunzps4sbbbo","number":2985,"title":"add new dataset kan_hope","user":{"login":"adeepH","id":46108405,"node_id":"MDQ6VXNlcjQ2MTA4NDA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46108405?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adeepH","html_url":"https:\/\/github.com\/adeepH","followers_url":"https:\/\/api.github.com\/users\/adeepH\/followers","following_url":"https:\/\/api.github.com\/users\/adeepH\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adeepH\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adeepH\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adeepH\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adeepH\/orgs","repos_url":"https:\/\/api.github.com\/users\/adeepH\/repos","events_url":"https:\/\/api.github.com\/users\/adeepH\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adeepH\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-29T05:20:28Z","updated_at":"2021-10-01T16:55:19Z","closed_at":"2021-10-01T16:55:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2985","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2985","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2985.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2985.patch","merged_at":"2021-10-01T16:55:19Z"},"body":"## Adding a Dataset\r\n- **Name:** *KanHope*\r\n- **Description:** *A code-mixed English-Kannada dataset for Hope speech detection*\r\n- **Task:** *Binary Text Classification*\r\n- **Paper:** *https:\/\/arxiv.org\/abs\/2108.04616* \r\n- **Data:** 
*https:\/\/github.com\/adeepH\/kan_hope\/tree\/main\/dataset*\r\n- **Motivation:** *The dataset is amongst the very few resources available for code-mixed low-resourced Dravidian languages of India*","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2985\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2985\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2984","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2984\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2984\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2984\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2984","id":1010484326,"node_id":"I_kwDODunzps48OsRm","number":2984,"title":"Exceeded maximum rows when reading large files","user":{"login":"zijwang","id":25057983,"node_id":"MDQ6VXNlcjI1MDU3OTgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25057983?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zijwang","html_url":"https:\/\/github.com\/zijwang","followers_url":"https:\/\/api.github.com\/users\/zijwang\/followers","following_url":"https:\/\/api.github.com\/users\/zijwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zijwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zijwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zijwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zijwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/zijwang\/repos","events_url":"https:\/\/api.github.com\/users\/zijwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zijwang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-29T04:49:22Z","updated_at":"2021-10-12T06:05:42Z","closed_at":"2021-10-12T06:05:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\nWhen using `load_dataset` with json files, if the files are too large, there will be \"Exceeded maximum rows\" error.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndataset = load_dataset('json', data_files=data_files) # data files have 3M rows in a single file\r\n```\r\n\r\n## Expected results\r\nNo error\r\n\r\n## Actual results\r\n```\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/site-packages\/datasets\/packaged_modules\/json\/json.py in _generate_tables(self, files)\r\n 134 with open(file, encoding=\"utf-8\") as f:\r\n--> 135 dataset = json.load(f)\r\n 136 except json.JSONDecodeError:\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/json\/__init__.py in load(fp, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)\r\n 292 \"\"\"\r\n--> 293 return loads(fp.read(),\r\n 294 cls=cls, object_hook=object_hook,\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/json\/__init__.py in loads(s, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)\r\n 345 parse_constant is None and 
object_pairs_hook is None and not kw):\r\n--> 346 return _default_decoder.decode(s)\r\n 347 if cls is None:\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/json\/decoder.py in decode(self, s, _w)\r\n 339 if end != len(s):\r\n--> 340 raise JSONDecodeError(\"Extra data\", s, end)\r\n 341 return obj\r\n\r\nJSONDecodeError: Extra data: line 2 column 1 (char 20321)\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nArrowInvalid Traceback (most recent call last)\r\n in \r\n----> 1 dataset = load_dataset('json', data_files=data_files)\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, task, streaming, **config_kwargs)\r\n 841 \r\n 842 # Download and prepare data\r\n--> 843 builder_instance.download_and_prepare(\r\n 844 download_config=download_config,\r\n 845 download_mode=download_mode,\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/site-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n 606 logger.warning(\"HF google storage unreachable. Downloading and preparing it from source\")\r\n 607 if not downloaded_from_gcs:\r\n--> 608 self._download_and_prepare(\r\n 609 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 610 )\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/site-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 684 try:\r\n 685 # Prepare split will record examples associated to the split\r\n--> 686 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n 687 except OSError as e:\r\n 688 raise OSError(\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/site-packages\/datasets\/builder.py in _prepare_split(self, split_generator)\r\n 1153 generator = self._generate_tables(**split_generator.gen_kwargs)\r\n 1154 with ArrowWriter(features=self.info.features, path=fpath) as writer:\r\n-> 1155 for key, table in utils.tqdm(\r\n 1156 generator, unit=\" tables\", leave=False, disable=bool(logging.get_verbosity() == logging.NOTSET)\r\n 1157 ):\r\n\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/site-packages\/datasets\/packaged_modules\/json\/json.py in _generate_tables(self, files)\r\n 135 dataset = json.load(f)\r\n 136 except json.JSONDecodeError:\r\n--> 137 raise e\r\n 138 raise ValueError(\r\n 139 f\"Not able to read records in the JSON file at {file}. 
\"\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/site-packages\/datasets\/packaged_modules\/json\/json.py in _generate_tables(self, files)\r\n 114 while True:\r\n 115 try:\r\n--> 116 pa_table = paj.read_json(\r\n 117 BytesIO(batch), read_options=paj.ReadOptions(block_size=block_size)\r\n 118 )\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/site-packages\/pyarrow\/_json.pyx in pyarrow._json.read_json()\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/anaconda3\/envs\/python\/lib\/python3.9\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowInvalid: Exceeded maximum rows\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform: Linux\r\n- Python version: 3.9\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2984\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2984\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2983","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2983\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2983\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2983\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2983","id":1010263058,"node_id":"PR_kwDODunzps4saw_v","number":2983,"title":"added SwissJudgmentPrediction 
dataset","user":{"login":"JoelNiklaus","id":3775944,"node_id":"MDQ6VXNlcjM3NzU5NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3775944?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JoelNiklaus","html_url":"https:\/\/github.com\/JoelNiklaus","followers_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/followers","following_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/orgs","repos_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/repos","events_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-28T22:17:56Z","updated_at":"2021-10-01T16:03:05Z","closed_at":"2021-10-01T16:03:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2983","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2983","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2983.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2983.patch","merged_at":"2021-10-01T16:03:05Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2983\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2983\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2982","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2982\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2982\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2982\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2982","id":1010118418,"node_id":"PR_kwDODunzps4saVLh","number":2982,"title":"Add the Math Aptitude Test of Heuristics 
dataset.","user":{"login":"hacobe","id":91226467,"node_id":"MDQ6VXNlcjkxMjI2NDY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/91226467?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hacobe","html_url":"https:\/\/github.com\/hacobe","followers_url":"https:\/\/api.github.com\/users\/hacobe\/followers","following_url":"https:\/\/api.github.com\/users\/hacobe\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hacobe\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hacobe\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hacobe\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hacobe\/orgs","repos_url":"https:\/\/api.github.com\/users\/hacobe\/repos","events_url":"https:\/\/api.github.com\/users\/hacobe\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hacobe\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-28T19:18:37Z","updated_at":"2021-10-01T19:51:23Z","closed_at":"2021-10-01T12:21:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2982","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2982","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2982.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2982.patch","merged_at":"2021-10-01T12:21:00Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2982\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2982\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2981","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2981\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2981\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2981\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2981","id":1009969310,"node_id":"PR_kwDODunzps4sZ4ke","number":2981,"title":"add wit 
dataset","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-09-28T16:34:49Z","updated_at":"2022-01-05T13:08:52Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2981","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2981","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2981.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2981.patch","merged_at":null},"body":"Resolves #2902 based on conversation there - would also close #2810. Open to suggestions\/help \ud83d\ude00\r\n\r\nCC @hassiahk @lhoestq @yjernite ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2981\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":2},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2981\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2980","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2980\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2980\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2980\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2980","id":1009873482,"node_id":"I_kwDODunzps48MXJK","number":2980,"title":"OpenSLR 25: ASR data for Amharic, Swahili and 
Wolof","user":{"login":"cdleong","id":4109253,"node_id":"MDQ6VXNlcjQxMDkyNTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4109253?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cdleong","html_url":"https:\/\/github.com\/cdleong","followers_url":"https:\/\/api.github.com\/users\/cdleong\/followers","following_url":"https:\/\/api.github.com\/users\/cdleong\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cdleong\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cdleong\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cdleong\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cdleong\/orgs","repos_url":"https:\/\/api.github.com\/users\/cdleong\/repos","events_url":"https:\/\/api.github.com\/users\/cdleong\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cdleong\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-09-28T15:04:36Z","updated_at":"2021-09-29T17:25:14Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *SLR25*\r\n- **Description:** *Subset 25 from OpenSLR. Other subsets have been added to https:\/\/huggingface.co\/datasets\/openslr, 25 covers Amharic, Swahili and Wolof data*\r\n- **Paper:** *https:\/\/www.openslr.org\/25\/ has citations for each of the three subsubsets. *\r\n- **Data:** *Currently the three links to the .tar.bz2 files can be found a thttps:\/\/www.openslr.org\/25\/*\r\n- **Motivation:** *Increase ASR data for underrepresented African languages. Also, other subsets of OpenSLR speech recognition have been uploaded, so this would be easy.*\r\n\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/openslr\/openslr.py already has been created for various other OpenSLR subsets, this should be relatively straightforward to do. 
\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2980\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2980\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2979","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2979\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2979\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2979\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2979","id":1009634147,"node_id":"I_kwDODunzps48Lctj","number":2979,"title":"ValueError when computing f1 metric with average None","user":{"login":"asofiaoliveira","id":74454835,"node_id":"MDQ6VXNlcjc0NDU0ODM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/74454835?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/asofiaoliveira","html_url":"https:\/\/github.com\/asofiaoliveira","followers_url":"https:\/\/api.github.com\/users\/asofiaoliveira\/followers","following_url":"https:\/\/api.github.com\/users\/asofiaoliveira\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/asofiaoliveira\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/asofiaoliveira\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/asofiaoliveira\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/asofiaoliveira\/orgs","repos_url":"https:\/\/api.github.com\/users\/asofiaoliveira\/repos","events_url":"https:\/\/api.github.com\/users\/asofiaoliveira\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/asofiaoliveira\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-28T11:34:53Z","updated_at":"2021-10-01T14:17:38Z","closed_at":"2021-10-01T14:17:38Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen I try to compute the f1 score for each class in a multiclass classification problem, I get a ValueError. The same happens with recall and precision. I traced the error to the `.item()` in these scripts, which is probably there for the other averages. E.g. from f1.py:\r\n```python \r\nreturn {\r\n \"f1\": f1_score(\r\n references,\r\n predictions,\r\n labels=labels,\r\n pos_label=pos_label,\r\n average=average,\r\n sample_weight=sample_weight,\r\n ).item(),\r\n }\r\n```\r\nSince the result is an array with more than one item, the `.item()` throws the error. I didn't submit a PR because this might be needed for the other averages, I'm not very familiar with the library\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_metric\r\nmetric = load_metric(\"f1\")\r\nmetric.add_batch(predictions=[2,34,1,34,1,2,3], references=[23,52,1,3,523,5,8])\r\nmetric.compute(average=None)\r\n```\r\n\r\n## Expected results\r\n`array([0.66666667, 0. , 0. , 0. , 0. ,\r\n 0. , 0. , 0. , 0. 
])`\r\n\r\n## Actual results\r\nValueError: can only convert an array of size 1 to a Python scalar\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.9.5\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2979\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2979\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2978","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2978\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2978\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2978\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2978","id":1009521419,"node_id":"I_kwDODunzps48LBML","number":2978,"title":"Run CI tests against non-production server","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-28T09:41:26Z","updated_at":"2021-09-28T15:23:50Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, the CI test suite performs requests to the HF production server.\r\n\r\nAs discussed with @elishowk, we should refactor our tests to use the HF staging server instead, like `huggingface_hub` and `transformers`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2978\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2978\/timeline","performed_via_github_app":null} 
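Below is a minimal sketch of the guard suggested in issue #2979 above: only call `.item()` when the score is a single value, and return a plain list for the per-class array produced by `average=None`. The helper name `compute_f1` and its exact signature are assumptions for illustration; this is not the fix that was actually merged into `datasets`.

```python
import numpy as np
from sklearn.metrics import f1_score

def compute_f1(predictions, references, labels=None, pos_label=1, average="binary", sample_weight=None):
    # scikit-learn returns a scalar for averaged scores and an ndarray of
    # per-class scores when average=None; np.asarray covers both cases.
    score = np.asarray(
        f1_score(
            references,
            predictions,
            labels=labels,
            pos_label=pos_label,
            average=average,
            sample_weight=sample_weight,
        )
    )
    # .item() only works on single-element arrays, which is what triggers the
    # ValueError in the report, so fall back to a list for per-class results.
    return {"f1": score.item() if score.size == 1 else score.tolist()}

# Mirrors the reproduction in the issue:
# compute_f1(predictions=[2, 34, 1, 34, 1, 2, 3], references=[23, 52, 1, 3, 523, 5, 8], average=None)
```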
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2977","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2977\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2977\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2977\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2977","id":1009378692,"node_id":"I_kwDODunzps48KeWE","number":2977,"title":"Impossible to load compressed csv","user":{"login":"Valahaar","id":19476123,"node_id":"MDQ6VXNlcjE5NDc2MTIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19476123?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Valahaar","html_url":"https:\/\/github.com\/Valahaar","followers_url":"https:\/\/api.github.com\/users\/Valahaar\/followers","following_url":"https:\/\/api.github.com\/users\/Valahaar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Valahaar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Valahaar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Valahaar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Valahaar\/orgs","repos_url":"https:\/\/api.github.com\/users\/Valahaar\/repos","events_url":"https:\/\/api.github.com\/users\/Valahaar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Valahaar\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-28T07:18:54Z","updated_at":"2021-10-01T15:53:16Z","closed_at":"2021-10-01T15:53:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIt is not possible to load from a compressed csv anymore.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset('csv', data_files=['\/path\/to\/csv.bz2'])\r\n```\r\n\r\n## Problem and possible solution\r\nThis used to work, but the commit that broke it is [this one](https:\/\/github.com\/huggingface\/datasets\/commit\/ad489d4597381fc2d12c77841642cbeaecf7a2e0#diff-6f60f8d0552b75be8b3bfd09994480fd60dcd4e7eb08d02f721218c3acdd2782).\r\n\r\n`pandas` usually gets the compression information from the filename itself (which was previously directly passed). 
Now, since it gets a file descriptor, it might be good to auto-infer the compression or let the user pass the `compression` kwarg to `load_dataset` (or maybe warn the user if the file ends with a commonly known compression scheme?).\r\n\r\n## Environment info\r\n- `datasets` version: 1.10.0 (and over)\r\n- Platform: Linux-5.8.0-45-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2977\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2977\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2976","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2976\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2976\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2976\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2976","id":1008647889,"node_id":"I_kwDODunzps48Hr7R","number":2976,"title":"Can't load dataset","user":{"login":"mskovalova","id":77006774,"node_id":"MDQ6VXNlcjc3MDA2Nzc0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/77006774?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mskovalova","html_url":"https:\/\/github.com\/mskovalova","followers_url":"https:\/\/api.github.com\/users\/mskovalova\/followers","following_url":"https:\/\/api.github.com\/users\/mskovalova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mskovalova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mskovalova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mskovalova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mskovalova\/orgs","repos_url":"https:\/\/api.github.com\/users\/mskovalova\/repos","events_url":"https:\/\/api.github.com\/users\/mskovalova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mskovalova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-27T21:38:14Z","updated_at":"2021-09-28T06:53:01Z","closed_at":"2021-09-28T06:53:01Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to load a wikitext dataset\r\n\r\n```\r\nfrom datasets import load_dataset\r\nraw_datasets = load_dataset(\"wikitext\")\r\n```\r\n\r\nValueError: Config name is missing.\r\nPlease pick one among the available configs: ['wikitext-103-raw-v1', 'wikitext-2-raw-v1', 'wikitext-103-v1', 'wikitext-2-v1']\r\nExample of usage:\r\n\t`load_dataset('wikitext', 'wikitext-103-raw-v1')`.\r\n\r\nIf I try\r\n```\r\nfrom datasets import load_dataset\r\nraw_datasets = load_dataset(\"wikitext-2-v1\")\r\n```\r\n\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.12.1\/datasets\/wikitext-2-v1\/wikitext-2-v1.py\r\n\r\n#### Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-5.4.104+-x86_64-with-Ubuntu-18.04-bionic (colab)\r\n- Python version: 3.7.12\r\n- PyArrow version: 
3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2976\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2976\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2975","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2975\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2975\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2975\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2975","id":1008444654,"node_id":"PR_kwDODunzps4sVAOt","number":2975,"title":"ignore dummy folder and dataset_infos.json","user":{"login":"Ishan-Kumar2","id":46553104,"node_id":"MDQ6VXNlcjQ2NTUzMTA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46553104?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Ishan-Kumar2","html_url":"https:\/\/github.com\/Ishan-Kumar2","followers_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/followers","following_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/orgs","repos_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/repos","events_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Ishan-Kumar2\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-27T18:09:03Z","updated_at":"2021-09-29T09:45:38Z","closed_at":"2021-09-29T09:05:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2975","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2975","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2975.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2975.patch","merged_at":"2021-09-29T09:05:38Z"},"body":"Fixes #2877\r\n\r\nAdded the `dataset_infos.json` to the ignored files list and also added check to ignore files which have parent directory as `dummy`.\r\nLet me know if it is correct. 
Thanks :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2975\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2975\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2974","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2974\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2974\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2974\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2974","id":1008247787,"node_id":"PR_kwDODunzps4sUZCX","number":2974,"title":"Actually disable dummy labels by default","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-27T14:50:20Z","updated_at":"2021-09-29T09:04:42Z","closed_at":"2021-09-29T09:04:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2974","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2974","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2974.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2974.patch","merged_at":"2021-09-29T09:04:41Z"},"body":"So I might have just changed the docstring instead of the actual default argument value and not realized.\r\n\r\n@lhoestq I'm sorry >.>","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2974\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2974\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2973","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2973\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2973\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2973\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2973","id":1007894592,"node_id":"PR_kwDODunzps4sTRvk","number":2973,"title":"Fix JSON metadata of masakhaner dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-27T09:09:08Z","updated_at":"2021-09-27T12:59:59Z","closed_at":"2021-09-27T12:59:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2973","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2973","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2973.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2973.patch","merged_at":"2021-09-27T12:59:58Z"},"body":"Fix #2971.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2973\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2973\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2972","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2972\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2972\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2972\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2972","id":1007808714,"node_id":"I_kwDODunzps48EfDK","number":2972,"title":"OSError: Not enough disk 
space.","user":{"login":"qqaatw","id":24835382,"node_id":"MDQ6VXNlcjI0ODM1Mzgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24835382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/qqaatw","html_url":"https:\/\/github.com\/qqaatw","followers_url":"https:\/\/api.github.com\/users\/qqaatw\/followers","following_url":"https:\/\/api.github.com\/users\/qqaatw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/qqaatw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/qqaatw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/qqaatw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/qqaatw\/orgs","repos_url":"https:\/\/api.github.com\/users\/qqaatw\/repos","events_url":"https:\/\/api.github.com\/users\/qqaatw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/qqaatw\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-09-27T07:41:22Z","updated_at":"2021-09-28T06:45:27Z","closed_at":"2021-09-28T06:43:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":nul
l,"pull_request":null,"body":"## Describe the bug\r\nI'm trying to download `natural_questions` dataset from the Internet, and I've specified the cache_dir which locates in a mounted disk and has enough disk space. However, even though the space is enough, the disk space checking function still reports the space of root `\/` disk having no enough space. \r\n\r\nThe file system structure is like below. The root `\/` has `115G` disk space available, and the `sda1` is mounted to `\/mnt`, which has `1.2T` disk space available:\r\n```\r\n\/\r\n\/mnt\/sda1\/path\/to\/args.dataset_cache_dir \r\n```\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndataset_config = DownloadConfig(\r\n cache_dir=os.path.abspath(args.dataset_cache_dir),\r\n resume_download=True,\r\n)\r\ndataset = load_dataset(\"natural_questions\", download_config=dataset_config)\r\n```\r\n\r\n## Expected results\r\n\r\nCan download the dataset without an error.\r\n\r\n## Actual results\r\n\r\nThe following error raised:\r\n```\r\nOSError: Not enough disk space. Needed: 134.92 GiB (download: 41.97 GiB, generated: 92.95 GiB, post-processed: Unknown size)\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Ubuntu 18.04\r\n- Python version: 3.8.10\r\n- PyArrow version:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2972\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2972\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2971","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2971\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2971\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2971\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2971","id":1007696522,"node_id":"I_kwDODunzps48EDqK","number":2971,"title":"masakhaner dataset load problem","user":{"login":"ontocord","id":8900094,"node_id":"MDQ6VXNlcjg5MDAwOTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8900094?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ontocord","html_url":"https:\/\/github.com\/ontocord","followers_url":"https:\/\/api.github.com\/users\/ontocord\/followers","following_url":"https:\/\/api.github.com\/users\/ontocord\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ontocord\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ontocord\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ontocord\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ontocord\/orgs","repos_url":"https:\/\/api.github.com\/users\/ontocord\/repos","events_url":"https:\/\/api.github.com\/users\/ontocord\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ontocord\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-27T04:59:07Z","updated_at":"2021-09-27T12:59:59Z","closed_at":"2021-09-27T12:59:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nMasakhaner dataset is not loading\r\n\r\n## Steps to reproduce the bug\r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"masakhaner\",'amh')\r\n```\r\n\r\n## Expected results\r\nExpected the return of a dataset\r\n\r\n## Actual results\r\n\r\n```\r\n\r\nNonMatchingSplitsSizesError Traceback (most recent call last)\r\n in ()\r\n 1 from datasets import load_dataset\r\n 2 \r\n----> 3 dataset = load_dataset(\"masakhaner\",'amh')\r\n\r\n\r\n\r\n3 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/info_utils.py\r\n in verify_splits(expected_splits, recorded_splits)\r\n 72 ]\r\n 73 if len(bad_splits) > 0:\r\n---> 74 raise NonMatchingSplitsSizesError(str(bad_splits))\r\n 75 logger.info(\"All the splits matched successfully.\")\r\n 76 \r\n\r\nNonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=639927, num_examples=1751, dataset_name='masakhaner'), 'recorded': SplitInfo(name='train', num_bytes=639911, num_examples=1750, dataset_name='masakhaner')}, {'expected': SplitInfo(name='validation', num_bytes=92768, num_examples=251, 
dataset_name='masakhaner'), 'recorded': SplitInfo(name='validation', num_bytes=92753, num_examples=250, dataset_name='masakhaner')}, {'expected': SplitInfo(name='test', num_bytes=184286, num_examples=501, dataset_name='masakhaner'), 'recorded': SplitInfo(name='test', num_bytes=184271, num_examples=500, dataset_name='masakhaner')}]\r\n```\r\n\r\n## Environment info\r\nGoogle Colab\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2971\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2971\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2970","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2970\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2970\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2970\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2970","id":1007340089,"node_id":"I_kwDODunzps48Cso5","number":2970,"title":"Magnet\u2019s","user":{"login":"rcacho172","id":90449239,"node_id":"MDQ6VXNlcjkwNDQ5MjM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90449239?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rcacho172","html_url":"https:\/\/github.com\/rcacho172","followers_url":"https:\/\/api.github.com\/users\/rcacho172\/followers","following_url":"https:\/\/api.github.com\/users\/rcacho172\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rcacho172\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rcacho172\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rcacho172\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rcacho172\/orgs","repos_url":"https:\/\/api.github.com\/users\/rcacho172\/repos","events_url":"https:\/\/api.github.com\/users\/rcacho172\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rcacho172\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-26T09:50:29Z","updated_at":"2021-09-26T10:38:59Z","closed_at":"2021-09-26T10:38:59Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2970\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2970\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2969","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2969\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2969\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2969\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2969","id":1007217867,"node_id":"I_kwDODunzps48COzL","number":2969,"title":"medical-dialog error","user":{"login":"smeyerhot","id":43877130,"node_id":"MDQ6VXNlcjQzODc3MTMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43877130?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/smeyerhot","html_url":"https:\/\/github.com\/smeyerhot","followers_url":"https:\/\/api.github.com\/users\/smeyerhot\/followers","following_url":"https:\/\/api.github.com\/users\/smeyerhot\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/smeyerhot\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/smeyerhot\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/smeyerhot\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/smeyerhot\/orgs","repos_url":"https:\/\/api.github.com\/users\/smeyerhot\/repos","events_url":"https:\/\/api.github.com\/users\/smeyerhot\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/smeyerhot\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-25T23:08:44Z","updated_at":"2021-10-11T07:46:42Z","closed_at":"2021-10-11T07:46:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\nWhen I attempt to download the huggingface datatset medical_dialog it errors out midway through\r\n## Steps to reproduce the bug\r\n```python\r\nraw_datasets = load_dataset(\"medical_dialog\", \"en\", split=\"train\", download_mode=\"force_redownload\", data_dir=\".\/Medical-Dialogue-Dataset-English\")\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\nNo error\r\n## Actual results\r\n\r\n```\r\n3 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/info_utils.py in verify_splits(expected_splits, recorded_splits)\r\n 72 ]\r\n 73 if len(bad_splits) > 0:\r\n---> 74 raise NonMatchingSplitsSizesError(str(bad_splits))\r\n 75 logger.info(\"All the splits matched successfully.\")\r\n 76 \r\n\r\nNonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=0, num_examples=0, dataset_name='medical_dialog'), 'recorded': SplitInfo(name='train', num_bytes=295097913, num_examples=229674, dataset_name='medical_dialog')}]\r\n```\r\nSpecify the actual results or traceback.\r\n\r\n## Environment 
info\r\n\r\n- `datasets` version: 1.21.1\r\n- Platform: colab\r\n- Python version: colab 3.7\r\n- PyArrow version: N\/A\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2969\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2969\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2968","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2968\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2968\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2968\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2968","id":1007209488,"node_id":"I_kwDODunzps48CMwQ","number":2968,"title":"`DatasetDict` cannot be exported to parquet if the splits have different features","user":{"login":"LysandreJik","id":30755778,"node_id":"MDQ6VXNlcjMwNzU1Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30755778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LysandreJik","html_url":"https:\/\/github.com\/LysandreJik","followers_url":"https:\/\/api.github.com\/users\/LysandreJik\/followers","following_url":"https:\/\/api.github.com\/users\/LysandreJik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LysandreJik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LysandreJik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LysandreJik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LysandreJik\/orgs","repos_url":"https:\/\/api.github.com\/users\/LysandreJik\/repos","events_url":"https:\/\/api.github.com\/users\/LysandreJik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LysandreJik\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-09-25T22:18:39Z","updated_at":"2021-10-07T22:47:42Z","closed_at":"2021-10-07T22:47:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nI'm trying to use parquet as a means of serialization for both `Dataset` and `DatasetDict` objects. Using `to_parquet` alongside `from_parquet` or `load_dataset` for a `Dataset` works perfectly. \r\n\r\nFor `DatasetDict`, I use `to_parquet` on each split to save the parquet files in individual folders representing individual splits. This works too, as long as the splits have identical features. 
If a split has different features to neighboring splits, then loading the dataset will fail: a single schema is used to load both splits, resulting in a failure to load the second parquet file.\r\n\r\n## Steps to reproduce the bug\r\n\r\nThe following works as expected:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\"lhoestq\/custom_squad\")\r\n\r\nds['train'].to_parquet(\".\/ds\/train\/split.parquet\")\r\nds['validation'].to_parquet(\".\/ds\/validation\/split.parquet\")\r\n\r\nbrand_new_dataset = load_dataset(\"ds\")\r\n```\r\n\r\nModifying a single split to add a new feature ends up in a crash:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\"lhoestq\/custom_squad\")\r\n\r\n\r\ndef identical_answers(e):\r\n e['identical_answers'] = len(set(e['answers']['text'])) == 1\r\n return e\r\n\r\n\r\nds['validation'] = ds['validation'].map(identical_answers)\r\nds['train'].to_parquet(\".\/ds\/train\/split.parquet\")\r\nds['validation'].to_parquet(\".\/ds\/validation\/split.parquet\")\r\n\r\nbrand_new_dataset = load_dataset(\"ds\")\r\n```\r\n```\r\n File \"\/home\/lysandre\/.config\/JetBrains\/PyCharm2021.2\/scratches\/datasets\/upload_dataset.py\", line 26, in \r\n brand_new_dataset = load_dataset(\"ds\")\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/load.py\", line 1151, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/builder.py\", line 642, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/builder.py\", line 732, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/builder.py\", line 1194, in _prepare_split\r\n writer.write_table(table)\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/arrow_writer.py\", line 428, in write_table\r\n pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/arrow_writer.py\", line 428, in \r\n pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n File \"pyarrow\/table.pxi\", line 1257, in pyarrow.lib.Table.__getitem__\r\n File \"pyarrow\/table.pxi\", line 1833, in pyarrow.lib.Table.column\r\n File \"pyarrow\/table.pxi\", line 1808, in pyarrow.lib.Table._ensure_integer_index\r\nKeyError: 'Field \"identical_answers\" does not exist in table schema'\r\n```\r\n\r\nIt does work, however, to use the `save_to_disk` and `load_from_disk` methods:\r\n\r\n```py\r\nfrom datasets import load_from_disk\r\n\r\nds = load_dataset(\"lhoestq\/custom_squad\")\r\n\r\n\r\ndef identical_answers(e):\r\n e['identical_answers'] = len(set(e['answers']['text'])) == 1\r\n return e\r\n\r\n\r\nds['validation'] = ds['validation'].map(identical_answers)\r\n\r\nds.save_to_disk(\"local_path\")\r\nbrand_new_dataset = load_from_disk(\"local_path\")\r\n```\r\n\r\n## Expected results\r\n\r\nThe saving works correctly - but the loading fails. 
I would expect either an error when saving or an error-less instantiation of the dataset through the parquet files.\r\n\r\nIf it's helpful, I've traced a possible patch to the `write_table` method here: \r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/26ff41aa3a642e46489db9e95be1e9a8c4e64bea\/src\/datasets\/arrow_writer.py#L424-L425\r\n\r\nThe writer is built only if the parquet writer is `None`, but I expect we would want to build a new writer as the table schema has changed. Furthermore, it relies on having the property `update_features` set to `True` in order to update the features:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/26ff41aa3a642e46489db9e95be1e9a8c4e64bea\/src\/datasets\/arrow_writer.py#L254-L255\r\n\r\nbut the `ArrowWriter` is instantiated without that option in the `_prepare_split` method of the `ArrowBasedBuilder`:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/26ff41aa3a642e46489db9e95be1e9a8c4e64bea\/src\/datasets\/builder.py#L1190\r\n\r\nUpdating these two parts to recreate a schema on each split results in an error that is, unfortunately, out of my expertise:\r\n\r\n```\r\n File \"\/home\/lysandre\/.config\/JetBrains\/PyCharm2021.2\/scratches\/datasets\/upload_dataset.py\", line 27, in \r\n brand_new_dataset = load_dataset(\"ds\")\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/load.py\", line 1163, in load_dataset\r\n ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications, in_memory=keep_in_memory)\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/builder.py\", line 819, in as_dataset\r\n datasets = utils.map_nested(\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/utils\/py_utils.py\", line 207, in map_nested\r\n mapped = [\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/utils\/py_utils.py\", line 208, in \r\n _single_map_nested((function, obj, types, None, True))\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/utils\/py_utils.py\", line 143, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/builder.py\", line 850, in _build_single_dataset\r\n ds = self._as_dataset(\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/builder.py\", line 920, in _as_dataset\r\n dataset_kwargs = ArrowReader(self._cache_dir, self.info).read(\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/arrow_reader.py\", line 217, in read\r\n return self.read_files(files=files, original_instructions=instructions, in_memory=in_memory)\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/arrow_reader.py\", line 238, in read_files\r\n pa_table = self._read_files(files, in_memory=in_memory)\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/arrow_reader.py\", line 173, in _read_files\r\n pa_table: Table = self._get_table_from_filename(f_dict, in_memory=in_memory)\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/arrow_reader.py\", line 308, in _get_table_from_filename\r\n table = ArrowReader.read_table(filename, in_memory=in_memory)\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/arrow_reader.py\", line 327, in read_table\r\n return table_cls.from_file(filename)\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/table.py\", line 458, in from_file\r\n table = 
_memory_mapped_arrow_table_from_file(filename)\r\n File \"\/home\/lysandre\/Workspaces\/Python\/datasets\/src\/datasets\/table.py\", line 45, in _memory_mapped_arrow_table_from_file\r\n pa_table = opened_stream.read_all()\r\n File \"pyarrow\/ipc.pxi\", line 563, in pyarrow.lib.RecordBatchReader.read_all\r\n File \"pyarrow\/error.pxi\", line 114, in pyarrow.lib.check_status\r\nOSError: Header-type of flatbuffer-encoded Message is not RecordBatch.\r\n```\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.2.dev0\r\n- Platform: Linux-5.14.7-arch1-1-x86_64-with-glibc2.33\r\n- Python version: 3.9.7\r\n- PyArrow version: 5.0.0\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2968\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2968\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2967","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2967\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2967\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2967\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2967","id":1007194837,"node_id":"I_kwDODunzps48CJLV","number":2967,"title":"Adding vision-and-language datasets (e.g., VQA, VCR) to Datasets","user":{"login":"WadeYin9712","id":42200725,"node_id":"MDQ6VXNlcjQyMjAwNzI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42200725?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/WadeYin9712","html_url":"https:\/\/github.com\/WadeYin9712","followers_url":"https:\/\/api.github.com\/users\/WadeYin9712\/followers","following_url":"https:\/\/api.github.com\/users\/WadeYin9712\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/WadeYin9712\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/WadeYin9712\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/WadeYin9712\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/WadeYin9712\/orgs","repos_url":"https:\/\/api.github.com\/users\/WadeYin9712\/repos","events_url":"https:\/\/api.github.com\/users\/WadeYin9712\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/WadeYin9712\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-25T20:58:15Z","updated_at":"2021-10-03T20:34:22Z","closed_at":"2021-10-03T20:34:22Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nWould you like to add any vision-and-language datasets (e.g., VQA, VCR) to Huggingface Datasets?\r\n\r\n**Describe the solution you'd like**\r\nN\/A\r\n\r\n**Describe alternatives you've considered**\r\nN\/A\r\n\r\n**Additional context**\r\nThis is Da Yin at UCLA. 
Recently, we have published an EMNLP 2021 paper about geo-diverse visual commonsense reasoning (https:\/\/arxiv.org\/abs\/2109.06860). We propose a new dataset called GD-VCR, a vision-and-language dataset to evaluate how well V&L models perform on scenarios involving geo-location-specific commonsense. We hope to have our V&L dataset incorporated into Huggingface to further promote our project, but I haven't seen much V&L datasets in the current package. Is it possible to add V&L datasets, and if so, how should we prepare for the loading? Thank you very much!\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2967\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2967\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2966","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2966\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2966\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2966\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2966","id":1007142233,"node_id":"PR_kwDODunzps4sRRMs","number":2966,"title":"Upload greek-legal-code dataset","user":{"login":"christospi","id":9130406,"node_id":"MDQ6VXNlcjkxMzA0MDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9130406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/christospi","html_url":"https:\/\/github.com\/christospi","followers_url":"https:\/\/api.github.com\/users\/christospi\/followers","following_url":"https:\/\/api.github.com\/users\/christospi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/christospi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/christospi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/christospi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/christospi\/orgs","repos_url":"https:\/\/api.github.com\/users\/christospi\/repos","events_url":"https:\/\/api.github.com\/users\/christospi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/christospi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-25T16:52:15Z","updated_at":"2021-10-13T13:37:30Z","closed_at":"2021-10-13T13:37:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2966","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2966","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2966.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2966.patch","merged_at":"2021-10-13T13:37:30Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2966\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2966\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2965","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2965\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2965\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2965\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2965","id":1007084153,"node_id":"I_kwDODunzps48BuJ5","number":2965,"title":"Invalid download URL of WMT17 `zh-en` data ","user":{"login":"Ririkoo","id":3339950,"node_id":"MDQ6VXNlcjMzMzk5NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3339950?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Ririkoo","html_url":"https:\/\/github.com\/Ririkoo","followers_url":"https:\/\/api.github.com\/users\/Ririkoo\/followers","following_url":"https:\/\/api.github.com\/users\/Ririkoo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Ririkoo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Ririkoo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Ririkoo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Ririkoo\/orgs","repos_url":"https:\/\/api.github.com\/users\/Ririkoo\/repos","events_url":"https:\/\/api.github.com\/users\/Ririkoo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Ririkoo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-25T13:17:32Z","updated_at":"2022-01-19T14:09:48Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nPartial data (wmt17 zh-en) cannot be downloaded due to an invalid URL.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n dataset = load_dataset('wmt17','zh-en')\r\n```\r\n\r\n## Expected results\r\nConnectionError: Couldn't reach ftp:\/\/cwmt-wmt:cwmt-wmt@datasets.nju.edu.cn\/parallel\/casia2015.zip","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2965\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":1,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2965\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2964","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2964\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2964\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2964\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2964","id":1006605904,"node_id":"I_kwDODunzps47_5ZQ","number":2964,"title":"Error when calculating Matthews Correlation Coefficient loaded with `load_metric`","user":{"login":"alvarobartt","id":36760800,"node_id":"MDQ6VXNlcjM2NzYwODAw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36760800?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alvarobartt","html_url":"https:\/\/github.com\/alvarobartt","followers_url":"https:\/\/api.github.com\/users\/alvarobartt\/followers","following_url":"https:\/\/api.github.com\/users\/alvarobartt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alvarobartt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alvarobartt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alvarobartt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alvarobartt\/orgs","repos_url":"https:\/\/api.github.com\/users\/alvarobartt\/repos","events_url":"https:\/\/api.github.com\/users\/alvarobartt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alvarobartt\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-24T15:55:21Z","updated_at":"2021-09-25T08:06:07Z","closed_at":"2021-09-25T08:06:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nAfter loading the metric named \"[Matthews Correlation Coefficient](https:\/\/huggingface.co\/metrics\/matthews_correlation)\" from `\ud83e\udd17datasets`, the `.compute` method fails with the following exception `AttributeError: 'float' object has no attribute 'item'` (complete stack trace can be provided if required).\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\nimport torch\r\npredictions = torch.ones((10,))\r\nreferences = torch.zeros((10,))\r\n\r\nfrom datasets import load_metric\r\n\r\nMETRIC = load_metric(\"matthews_correlation\")\r\n\r\nresult = METRIC.compute(predictions=predictions, references=references)\r\n```\r\n\r\n## Expected results\r\n\r\nWe should expect a Python `dict` as it follows:\r\n\r\n```\r\n{\r\n \"matthews_correlation\": float()\r\n}\r\n```\r\n\r\nas defined in https:\/\/github.com\/huggingface\/datasets\/blob\/master\/metrics\/matthews_correlation\/matthews_correlation.py, so the fix will imply removing `.item()`, since the value returned by the `scikit-learn` function is not a `torch.Tensor` but a `float`, which means that the `.item()` will fail.\r\n\r\n## Actual results\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\/home\/alvaro.bartolome\/XXX\/xxx\/cli.py\", line 59, in main\r\n app()\r\n File 
\"\/home\/alvaro.bartolome\/miniconda3\/envs\/xxx\/lib\/python3.9\/site-packages\/typer\/main.py\", line 214, in __call__\r\n return get_command(self)(*args, **kwargs)\r\n File \"\/home\/alvaro.bartolome\/miniconda3\/envs\/xxx\/lib\/python3.9\/site-packages\/click\/core.py\", line 1137, in __call__\r\n return self.main(*args, **kwargs)\r\n File \"\/home\/alvaro.bartolome\/miniconda3\/envs\/xxx\/lib\/python3.9\/site-packages\/click\/core.py\", line 1062, in main\r\n rv = self.invoke(ctx)\r\n File \"\/home\/alvaro.bartolome\/miniconda3\/envs\/xxx\/lib\/python3.9\/site-packages\/click\/core.py\", line 1668, in invoke\r\n return _process_result(sub_ctx.command.invoke(sub_ctx))\r\n File \"\/home\/alvaro.bartolome\/miniconda3\/envs\/xxx\/lib\/python3.9\/site-packages\/click\/core.py\", line 1404, in invoke\r\n return ctx.invoke(self.callback, **ctx.params)\r\n File \"\/home\/alvaro.bartolome\/miniconda3\/envs\/xxx\/lib\/python3.9\/site-packages\/click\/core.py\", line 763, in invoke\r\n return __callback(*args, **kwargs)\r\n File \"\/home\/alvaro.bartolome\/miniconda3\/envs\/xxx\/lib\/python3.9\/site-packages\/typer\/main.py\", line 500, in wrapper\r\n return callback(**use_params) # type: ignore\r\n File \"\/home\/alvaro.bartolome\/XXX\/xxx\/cli.py\", line 43, in train\r\n metrics = trainer.evaluate()\r\n File \"\/home\/alvaro.bartolome\/miniconda3\/envs\/xxx\/lib\/python3.9\/site-packages\/transformers\/trainer.py\", line 2051, in evaluate\r\n output = eval_loop(\r\n File \"\/home\/alvaro.bartolome\/miniconda3\/envs\/xxx\/lib\/python3.9\/site-packages\/transformers\/trainer.py\", line 2292, in evaluation_loop\r\n metrics = self.compute_metrics(EvalPrediction(predictions=all_preds, label_ids=all_labels))\r\n File \"\/home\/alvaro.bartolome\/XXX\/xxx\/metrics.py\", line 20, in compute_metrics\r\n res = METRIC.compute(predictions=predictions, references=eval_preds.label_ids)\r\n File \"\/home\/alvaro.bartolome\/miniconda3\/envs\/lang\/lib\/python3.9\/site-packages\/datasets\/metric.py\", line 402, in compute\r\n output = self._compute(predictions=predictions, references=references, **kwargs)\r\n File \"\/home\/alvaro.bartolome\/.cache\/huggingface\/modules\/datasets_modules\/metrics\/matthews_correlation\/0275f1e9a4d318e3ea8cdd87547ee0d58d894966616052e3d18444ac8ddd2357\/matthews_correlation.py\", line 88, in _compute\r\n \"matthews_correlation\": matthews_corrcoef(references, predictions, sample_weight=sample_weight).item(),\r\nAttributeError: 'float' object has no attribute 'item'\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-4.15.0-1113-azure-x86_64-with-glibc2.23\r\n- Python version: 3.9.7\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2964\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2964\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2963","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2963\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2963\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2963\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2963","id":1006588605,"node_id":"I_kwDODunzps47_1K9","number":2963,"title":" raise TypeError( TypeError: Provided `function` which is applied to all elements of table returns a variable of type . Make sure provided `function` returns a variable of type `dict` to update the dataset or `None` if you are only interested in side effects.","user":{"login":"keloemma","id":40454218,"node_id":"MDQ6VXNlcjQwNDU0MjE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40454218?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/keloemma","html_url":"https:\/\/github.com\/keloemma","followers_url":"https:\/\/api.github.com\/users\/keloemma\/followers","following_url":"https:\/\/api.github.com\/users\/keloemma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/keloemma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/keloemma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/keloemma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/keloemma\/orgs","repos_url":"https:\/\/api.github.com\/users\/keloemma\/repos","events_url":"https:\/\/api.github.com\/users\/keloemma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/keloemma\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-24T15:35:11Z","updated_at":"2021-09-24T15:38:24Z","closed_at":"2021-09-24T15:38:24Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\nI am trying to use Dataset to load my file in order to use Bert embeddings model baut when I finished loading using dataset and I want to pass to the tokenizer using the function map; I get the following error : raise TypeError(\r\nTypeError: Provided `function` which is applied to all elements of table returns a variable of type . 
Make sure provided `function` returns a variable of type `dict` to update the dataset or `None` if you are only interested in side effects.\r\n\r\nI was able to load my file using dataset before but since this morning , I keep getting this erreor.\r\n## Steps to reproduce the bug\r\n```python\r\n# Xtrain, ytrain, filename, len_labels = read_file_2(fic)\r\n\t# Xtrain, lge_size = get_flaubert_layer(Xtrain, path_to_model_lge)\r\n\r\n\tdata_preprocessed = make_new_traindata(Xtrain)\r\n\t\r\n\tmy_dict = {\"verbatim\": data_preprocessed[1], \"label\": ytrain} # lemme avec conjonction\r\n\tdataset = Dataset.from_dict(my_dict)\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform:\r\n- Python version:\r\n- PyArrow version:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2963\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2963\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2962","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2962\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2962\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2962\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2962","id":1006557666,"node_id":"I_kwDODunzps47_tni","number":2962,"title":"Enable splits during streaming the dataset","user":{"login":"merveenoyan","id":53175384,"node_id":"MDQ6VXNlcjUzMTc1Mzg0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53175384?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/merveenoyan","html_url":"https:\/\/github.com\/merveenoyan","followers_url":"https:\/\/api.github.com\/users\/merveenoyan\/followers","following_url":"https:\/\/api.github.com\/users\/merveenoyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/merveenoyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/merveenoyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/merveenoyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/merveenoyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/merveenoyan\/repos","events_url":"https:\/\/api.github.com\/users\/merveenoyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/merveenoyan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-24T15:01:29Z","updated_at":"2021-09-24T15:01:29Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the Problem\r\n\r\nI'd like to stream only a specific percentage or part of the dataset. 
\r\nI want to do splitting when I'm streaming dataset as well.\r\n\r\n## Solution\r\nEnabling splits when `streaming = True` as well.\r\n`e.g. dataset = load_dataset('dataset', split='train[:100]', streaming = True)`\r\n\r\n## Alternatives\r\nBelow is the alternative of doing it.\r\n`dataset = load_dataset(\"dataset\", split='train', streaming = True).take(100)`\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2962\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2962\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2961","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2961\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2961\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2961\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2961","id":1006453781,"node_id":"PR_kwDODunzps4sPTXV","number":2961,"title":"Fix CI doc build","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-24T13:13:28Z","updated_at":"2021-09-24T13:18:07Z","closed_at":"2021-09-24T13:18:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2961","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2961","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2961.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2961.patch","merged_at":"2021-09-24T13:18:07Z"},"body":"Pin `fsspec`.\r\n\r\nBefore the issue: 'fsspec-2021.8.1', 's3fs-2021.8.1'\r\nGenerating the issue: 'fsspec-2021.9.0', 's3fs-0.5.1'\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2961\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2961\/timeline","performed_via_github_app":null} 
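Editor's note on issue #2962 above: until split slicing such as `train[:100]` is supported with `streaming=True`, the workaround the issue itself suggests is to stream the full split and cap it with `.take()`. A self-contained sketch of that pattern follows; the dataset name is a placeholder, not a real Hub dataset.

```python
# Sketch of the workaround from issue #2962: stream a split and cap it with .take(),
# since 'train[:100]'-style slicing is the feature being requested for streaming mode.
from datasets import load_dataset

# "some_dataset" is a placeholder name, not a real dataset on the Hub
streamed = load_dataset("some_dataset", split="train", streaming=True)

for example in streamed.take(100):  # lazily yields at most the first 100 examples
    ...  # process each streamed example
```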
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2960","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2960\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2960\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2960\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2960","id":1006222850,"node_id":"PR_kwDODunzps4sOl0Y","number":2960,"title":"Support pandas 1.3 new `read_csv` parameters","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-24T08:37:24Z","updated_at":"2021-09-24T11:22:31Z","closed_at":"2021-09-24T11:22:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2960","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2960","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2960.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2960.patch","merged_at":"2021-09-24T11:22:30Z"},"body":"Support two new arguments introduced in pandas v1.3.0:\r\n- `encoding_errors`\r\n- `on_bad_lines` \r\n\r\n`read_csv` reference: https:\/\/pandas.pydata.org\/pandas-docs\/stable\/reference\/api\/pandas.read_csv.html","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2960\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2960\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2959","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2959\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2959\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2959\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2959","id":1005547632,"node_id":"PR_kwDODunzps4sMihl","number":2959,"title":"Added computer vision 
tasks","user":{"login":"merveenoyan","id":53175384,"node_id":"MDQ6VXNlcjUzMTc1Mzg0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53175384?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/merveenoyan","html_url":"https:\/\/github.com\/merveenoyan","followers_url":"https:\/\/api.github.com\/users\/merveenoyan\/followers","following_url":"https:\/\/api.github.com\/users\/merveenoyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/merveenoyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/merveenoyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/merveenoyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/merveenoyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/merveenoyan\/repos","events_url":"https:\/\/api.github.com\/users\/merveenoyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/merveenoyan\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-09-23T15:07:27Z","updated_at":"2022-02-04T14:16:58Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2959","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2959","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2959.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2959.patch","merged_at":null},"body":"Added various image processing\/computer vision tasks.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2959\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2959\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2958","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2958\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2958\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2958\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2958","id":1005144601,"node_id":"PR_kwDODunzps4sLTaB","number":2958,"title":"Add security policy to the 
project","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-23T08:20:55Z","updated_at":"2021-10-21T15:16:44Z","closed_at":"2021-10-21T15:16:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2958","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2958","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2958.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2958.patch","merged_at":"2021-10-21T15:16:43Z"},"body":"Add security policy to the project, as recommended by GitHub: https:\/\/docs.github.com\/en\/code-security\/getting-started\/adding-a-security-policy-to-your-repository\r\n\r\nClose #2953.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2958\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2958\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2957","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2957\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2957\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2957\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2957","id":1004868337,"node_id":"I_kwDODunzps475RLx","number":2957,"title":"MultiWOZ Dataset 
NonMatchingChecksumError","user":{"login":"bradyneal","id":8754873,"node_id":"MDQ6VXNlcjg3NTQ4NzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8754873?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bradyneal","html_url":"https:\/\/github.com\/bradyneal","followers_url":"https:\/\/api.github.com\/users\/bradyneal\/followers","following_url":"https:\/\/api.github.com\/users\/bradyneal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bradyneal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bradyneal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bradyneal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bradyneal\/orgs","repos_url":"https:\/\/api.github.com\/users\/bradyneal\/repos","events_url":"https:\/\/api.github.com\/users\/bradyneal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bradyneal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-22T23:45:00Z","updated_at":"2021-10-01T06:23:32Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe checksums for the downloaded MultiWOZ dataset and source MultiWOZ dataset aren't matching.\r\n\r\n## Steps to reproduce the bug\r\nBoth of the below dataset versions yield the checksum error:\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('multi_woz_v22', 'v2.2')\r\ndataset = load_dataset('multi_woz_v22', 'v2.2_active_only')\r\n```\r\n\r\n## Expected results\r\nFor the above calls to `load_dataset` to work.\r\n\r\n## Actual results\r\nNonMatchingChecksumError. 
Traceback:\r\n> Traceback (most recent call last):\r\n File \"\/Users\/brady\/anaconda3\/envs\/elysium\/lib\/python3.8\/site-packages\/IPython\/core\/interactiveshell.py\", line 3441, in run_code\r\n exec(code_obj, self.user_global_ns, self.user_ns)\r\n File \"\", line 1, in \r\n dataset = load_dataset('multi_woz_v22', 'v2.2')\r\n File \"\/Users\/brady\/anaconda3\/envs\/elysium\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 847, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/Users\/brady\/anaconda3\/envs\/elysium\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 615, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/Users\/brady\/anaconda3\/envs\/elysium\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 675, in _download_and_prepare\r\n verify_checksums(\r\n File \"\/Users\/brady\/anaconda3\/envs\/elysium\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 40, in verify_checksums\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/dialog_acts.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/test\/dialogues_001.json']\r\n\r\n## Environment info\r\n- `datasets` version: 1.11.0\r\n- Platform: macOS-10.15.7-x86_64-i386-64bit\r\n- Python version: 3.8.10\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2957\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2957\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2956","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2956\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2956\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2956\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2956","id":1004306367,"node_id":"I_kwDODunzps473H-_","number":2956,"title":"Cache problem in the `load_dataset` method for local compressed 
file(s)","user":{"login":"SaulLu","id":55560583,"node_id":"MDQ6VXNlcjU1NTYwNTgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55560583?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SaulLu","html_url":"https:\/\/github.com\/SaulLu","followers_url":"https:\/\/api.github.com\/users\/SaulLu\/followers","following_url":"https:\/\/api.github.com\/users\/SaulLu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SaulLu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SaulLu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SaulLu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SaulLu\/orgs","repos_url":"https:\/\/api.github.com\/users\/SaulLu\/repos","events_url":"https:\/\/api.github.com\/users\/SaulLu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SaulLu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-22T13:34:32Z","updated_at":"2021-09-22T13:34:32Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nCache problem in the `load_dataset` method: when modifying a compressed file in a local folder `load_dataset` doesn't detect the change and load the previous version.\r\n\r\n## Steps to reproduce the bug\r\n\r\nTo test it directly, I have prepared a [Google Colaboratory notebook](https:\/\/colab.research.google.com\/drive\/11Em_Amoc-aPGhSBIkSHU2AvEh24nVayy?usp=sharing) that shows this behavior. 
\r\n\r\nFor this example, I have created a toy dataset at: https:\/\/huggingface.co\/datasets\/SaulLu\/toy_struc_dataset\r\n\r\nThis dataset is composed of two versions: \r\n- v1 on commit `a6beb46` which has a single example `{'id': 1, 'value': {'tag': 'a', 'value': 1}}` in file `train.jsonl.gz`\r\n- v2 on commit `e7935f4` (`main` head) which has a single example `{'attr': 1, 'id': 1, 'value': 'a'}` in file `train.jsonl.gz`\r\n\r\nWith a terminal, we can start to get the v1 version of the dataset\r\n```bash\r\ngit lfs install\r\ngit clone https:\/\/huggingface.co\/datasets\/SaulLu\/toy_struc_dataset\r\ncd toy_struc_dataset\r\ngit checkout a6beb46\r\n```\r\nThen we can load it with python and look at the content:\r\n```python\r\nfrom datasets import load_dataset\r\n\r\npath = \"\/content\/toy_struc_dataset\"\r\ndataset = load_dataset(path, data_files={\"train\": \"*.jsonl.gz\"})\r\nprint(dataset[\"train\"][0])\r\n```\r\nOutput\r\n```\r\n{'id': 1, 'value': {'tag': 'a', 'value': 1}} # This is the example in v1\r\n```\r\n\r\nWith a terminal, we can now start to get the v1 version of the dataset\r\n```bash\r\ngit checkout main\r\n```\r\nThen we can load it with python and look at the content:\r\n```python\r\nfrom datasets import load_dataset\r\n\r\npath = \"\/content\/toy_struc_dataset\"\r\ndataset = load_dataset(path, data_files={\"train\": \"*.jsonl.gz\"})\r\nprint(dataset[\"train\"][0])\r\n```\r\nOutput\r\n```\r\n{'id': 1, 'value': {'tag': 'a', 'value': 1}} # This is the example in v1 (not v2)\r\n```\r\n## Expected results\r\nThe last output should have been \r\n```\r\n{\"id\":1, \"value\": \"a\", \"attr\": 1} # This is the example in v2\r\n```\r\n\r\n## Ideas\r\n\r\nAs discussed offline with Quentin, if the cache hash was ever sensitive to changes in a compressed file we would probably not have the problem anymore.\r\n\r\nThis situation leads me to suggest 2 other features:\r\n- to also have an `load_from_cache_file` argument in the \"load_dataset\" method\r\n- to reorganize the cache so that we can delete the caches related to a dataset (cf issue #ToBeFilledSoon)\r\n\r\nAnd thanks again for this great library :hugs: \r\n\r\n## Environment info\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-5.4.104+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.12\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2956\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2956\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2955","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2955\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2955\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2955\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2955","id":1003999469,"node_id":"PR_kwDODunzps4sHuRu","number":2955,"title":"Update legacy Python image for CI tests in 
Linux","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-22T08:25:27Z","updated_at":"2021-09-24T10:36:05Z","closed_at":"2021-09-24T10:36:05Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2955","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2955","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2955.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2955.patch","merged_at":"2021-09-24T10:36:05Z"},"body":"Instead of legacy, use next-generation convenience images, built from the ground up with CI, efficiency, and determinism in mind. Here are some of the highlights:\r\n\r\n- Faster spin-up time - In Docker terminology, these next-gen images will generally have fewer and smaller layers. Using these new images will lead to faster image downloads when a build starts, and a higher likelihood that the image is already cached on the host.\r\n\r\n- Improved reliability and stability - The existing legacy convenience images are rebuilt practically every day with potential changes from upstream that we cannot always test fast enough. This leads to frequent breaking changes, which is not the best environment for stable, deterministic builds. 
Next-gen images will only be rebuilt for security and critical-bugs, leading to more stable and deterministic images.\r\n\r\nMore info: https:\/\/circleci.com\/docs\/2.0\/circleci-images","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2955\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2955\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2954","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2954\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2954\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2954\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2954","id":1003904803,"node_id":"PR_kwDODunzps4sHa8O","number":2954,"title":"Run tests in parallel","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-22T07:00:44Z","updated_at":"2021-09-28T06:55:51Z","closed_at":"2021-09-28T06:55:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2954","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2954","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2954.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2954.patch","merged_at":"2021-09-28T06:55:51Z"},"body":"Run CI tests in parallel to speed up the test suite.\r\n\r\nSpeed up results:\r\n- Linux: from `7m 30s` to `5m 32s`\r\n- Windows: from `13m 52s` to `11m 10s`\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2954\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2954\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2953","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2953\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2953\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2953\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2953","id":1002766517,"node_id":"I_kwDODunzps47xQC1","number":2953,"title":"Trying to get in touch regarding a security issue","user":{"login":"JamieSlome","id":55323451,"node_id":"MDQ6VXNlcjU1MzIzNDUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55323451?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JamieSlome","html_url":"https:\/\/github.com\/JamieSlome","followers_url":"https:\/\/api.github.com\/users\/JamieSlome\/followers","following_url":"https:\/\/api.github.com\/users\/JamieSlome\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JamieSlome\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JamieSlome\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JamieSlome\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JamieSlome\/orgs","repos_url":"https:\/\/api.github.com\/users\/JamieSlome\/repos","events_url":"https:\/\/api.github.com\/users\/JamieSlome\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JamieSlome\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.git
hub.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-21T15:58:13Z","updated_at":"2021-10-21T15:16:43Z","closed_at":"2021-10-21T15:16:43Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hey there!\n\nI'd like to report a security issue but cannot find contact instructions on your repository.\n\nIf not a hassle, might you kindly add a `SECURITY.md` file with an email, or another contact method? GitHub [recommends](https:\/\/docs.github.com\/en\/code-security\/getting-started\/adding-a-security-policy-to-your-repository) this best practice to ensure security issues are responsibly disclosed, and it would serve as a simple instruction for security researchers in the future.\n\nThank you for your consideration, and I look forward to hearing from you!\n\n(cc @huntr-helper)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2953\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2953\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2952","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2952\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2952\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2952\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2952","id":1002704096,"node_id":"PR_kwDODunzps4sDU8S","number":2952,"title":"Fix missing conda 
deps","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-21T15:23:01Z","updated_at":"2021-09-22T04:39:59Z","closed_at":"2021-09-21T15:30:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2952","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2952","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2952.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2952.patch","merged_at":"2021-09-21T15:30:44Z"},"body":"`aiohttp` was added as a dependency in #2662 but was missing for the conda build, which causes the 1.12.0 and 1.12.1 to fail.\r\n\r\nFix #2932.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2952\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2952\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2951","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2951\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2951\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2951\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2951","id":1001267888,"node_id":"PR_kwDODunzps4r-lGs","number":2951,"title":"Dummy labels no longer on by default in 
`to_tf_dataset`","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-20T18:26:59Z","updated_at":"2021-09-21T14:00:57Z","closed_at":"2021-09-21T10:14:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2951","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2951","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2951.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2951.patch","merged_at":"2021-09-21T10:14:32Z"},"body":"After more experimentation, I think I have a way to do things that doesn't depend on adding `dummy_labels` - they were quite a hacky solution anyway!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2951\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2951\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2950","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2950\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2950\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2950\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2950","id":1001085353,"node_id":"PR_kwDODunzps4r-AKu","number":2950,"title":"Fix fn kwargs in 
filter","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-20T15:10:26Z","updated_at":"2021-09-20T16:22:59Z","closed_at":"2021-09-20T15:28:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2950","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2950","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2950.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2950.patch","merged_at":"2021-09-20T15:28:01Z"},"body":"#2836 broke the `fn_kwargs` parameter of `filter`, as mentioned in https:\/\/github.com\/huggingface\/datasets\/issues\/2927\r\n\r\nI fixed that and added a test to make sure it doesn't happen again (for either map or filter)\r\n\r\nFix #2927","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2950\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2950\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2949","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2949\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2949\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2949\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2949","id":1001026680,"node_id":"PR_kwDODunzps4r90Pt","number":2949,"title":"Introduce web and wiki config in triviaqa 
dataset","user":{"login":"shirte","id":1706443,"node_id":"MDQ6VXNlcjE3MDY0NDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1706443?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shirte","html_url":"https:\/\/github.com\/shirte","followers_url":"https:\/\/api.github.com\/users\/shirte\/followers","following_url":"https:\/\/api.github.com\/users\/shirte\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shirte\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shirte\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shirte\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shirte\/orgs","repos_url":"https:\/\/api.github.com\/users\/shirte\/repos","events_url":"https:\/\/api.github.com\/users\/shirte\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shirte\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-09-20T14:17:23Z","updated_at":"2021-10-05T13:20:52Z","closed_at":"2021-10-01T15:39:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2949","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2949","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2949.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2949.patch","merged_at":"2021-10-01T15:39:29Z"},"body":"The TriviaQA paper suggests that the two subsets (Wikipedia and Web)\r\nshould be treated differently. There are also different leaderboards\r\nfor the two sets on CodaLab. For that reason, introduce additional\r\nbuilder configs in the trivia_qa dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2949\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2949\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2948","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2948\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2948\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2948\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2948","id":1000844077,"node_id":"PR_kwDODunzps4r9PdV","number":2948,"title":"Fix minor URL format in scitldr 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-20T11:11:32Z","updated_at":"2021-09-20T13:18:28Z","closed_at":"2021-09-20T13:18:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2948","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2948","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2948.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2948.patch","merged_at":"2021-09-20T13:18:28Z"},"body":"While investigating issue #2918, I found this minor format issues in the URLs (if runned in a Windows machine).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2948\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2948\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2947","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2947\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2947\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2947\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2947","id":1000798338,"node_id":"PR_kwDODunzps4r9GIP","number":2947,"title":"Don't use old, incompatible cache for the new 
`filter`","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-20T10:18:59Z","updated_at":"2021-09-20T16:25:09Z","closed_at":"2021-09-20T13:43:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2947","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2947","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2947.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2947.patch","merged_at":"2021-09-20T13:43:01Z"},"body":"#2836 changed `Dataset.filter` and the resulting data that are stored in the cache are different and incompatible with the ones of the previous `filter` implementation.\r\n\r\nHowever the caching mechanism wasn't able to differentiate between the old and the new implementation of filter (only the method name was taken into account). 
\r\n\r\nThis is an issue because anyone that update `datasets` and re-runs some code that uses `filter` would see an error, because the cache would try to load an incompatible `filter` result.\r\n\r\nTo fix this I added the notion of versioning for dataset transform in the caching mechanism, and bumped the version of the `filter` implementation to 2.0.0\r\n\r\nThis way the new `filter` outputs are now considered different from the old ones from the caching point of view.\r\n\r\nThis should fix #2943\r\n\r\ncc @anton-l","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2947\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2947\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2946","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2946\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2946\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2946\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2946","id":1000754824,"node_id":"PR_kwDODunzps4r89f8","number":2946,"title":"Update meteor score from nltk update","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-20T09:28:46Z","updated_at":"2021-09-20T09:35:59Z","closed_at":"2021-09-20T09:35:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2946","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2946","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2946.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2946.patch","merged_at":"2021-09-20T09:35:59Z"},"body":"It looks like there were issues in NLTK on the way the METEOR score was computed.\r\nA fix was added in NLTK at https:\/\/github.com\/nltk\/nltk\/pull\/2763, and therefore the scoring function no longer returns the same values.\r\n\r\nI updated the score of the example in the 
docs","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2946\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2946\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2945","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2945\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2945\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2945\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2945","id":1000624883,"node_id":"I_kwDODunzps47pFLz","number":2945,"title":"Protect master branch","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-20T06:47:01Z","updated_at":"2021-09-20T12:01:27Z","closed_at":"2021-09-20T12:00:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"After accidental merge commit (91c55355b634d0dc73350a7ddee1a6776dbbdd69) into `datasets` master branch, all commits present in the feature branch were permanently added to `datasets` master branch history, as e.g.:\r\n- 00cc036fea7c7745cfe722360036ed306796a3f2\r\n- 13ae8c98602bbad8197de3b9b425f4c78f582af1\r\n- ...\r\n\r\nI propose to protect our master branch, so that we avoid we can accidentally make this kind of mistakes in the future:\r\n- [x] For Pull Requests using GitHub, allow only squash merging, so that only a single commit per Pull Request is merged into the master branch\r\n - Currently, simple merge commits are already disabled\r\n - I propose to disable rebase merging as well\r\n- ~~Protect the master branch from direct pushes (to avoid accidentally pushing of merge commits)~~\r\n - ~~This protection would reject direct pushes to master branch~~\r\n - ~~If so, for each release (when we need to commit directly to the master branch), we should previously disable the protection and re-enable it 
again after the release~~\r\n- [x] Protect the master branch only from direct pushing of **merge commits**\r\n - GitHub offers the possibility to protect the master branch only from merge commits (which are the ones that introduce all the commits from the feature branch into the master branch).\r\n - No need to disable\/re-enable this protection on each release \r\n\r\nThis purpose of this Issue is to open a discussion about this problem and to agree in a solution.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2945\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2945\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2944","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2944\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2944\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2944\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2944","id":1000544370,"node_id":"I_kwDODunzps47oxhy","number":2944,"title":"Add `remove_columns` to `IterableDataset ` ","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-20T04:01:00Z","updated_at":"2021-10-08T15:31:53Z","closed_at":"2021-10-08T15:31:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nA clear and concise description of what the problem is.\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"c4\", 'realnewslike', streaming =True, split='train')\r\ndataset = dataset.remove_columns('url')\r\n```\r\n```\r\nAttributeError: 'IterableDataset' object has no attribute 'remove_columns'\r\n```\r\n\r\n**Describe the solution you'd like**\r\n\r\nIt would be nice to have `.remove_columns()` to match the `Datasets` api. \r\n\r\n\r\n**Describe alternatives you've considered**\r\n\r\nThis can be done with a single call to `.map()`, \r\n\r\nI can try to help add this. \ud83e\udd17","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2944\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2944\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2943","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2943\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2943\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2943\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2943","id":1000355115,"node_id":"I_kwDODunzps47oDUr","number":2943,"title":"Backwards compatibility broken for cached datasets that use `.filter()`","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-09-19T16:16:37Z","updated_at":"2021-09-20T16:25:43Z","closed_at":"2021-09-20T16:25:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAfter upgrading to datasets `1.12.0`, some cached `.filter()` steps from `1.11.0` started failing with \r\n`ValueError: Keys mismatch: between {'indices': Value(dtype='uint64', id=None)} and {'file': Value(dtype='string', id=None), 'text': Value(dtype='string', id=None), 'speaker_id': Value(dtype='int64', id=None), 'chapter_id': Value(dtype='int64', id=None), 'id': Value(dtype='string', id=None)}`\r\n\r\nRelated feature: https:\/\/github.com\/huggingface\/datasets\/pull\/2836\r\n\r\n:question: This is probably a `wontfix` bug, since it can be solved by simply cleaning the related cache dirs, but the workaround could be useful for someone googling the error :) \r\n\r\n## Workaround\r\nRemove the cache for the given dataset, e.g. `rm -rf ~\/.cache\/huggingface\/datasets\/librispeech_asr`.\r\n\r\n## Steps to reproduce the bug\r\n1. Delete `~\/.cache\/huggingface\/datasets\/librispeech_asr` if it exists.\r\n\r\n2. `pip install datasets==1.11.0` and run the following snippet:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nids = [\"1272-141231-0000\"]\r\nds = load_dataset(\"patrickvonplaten\/librispeech_asr_dummy\", \"clean\", split=\"validation\")\r\nds = ds.filter(lambda x: x[\"id\"] in ids)\r\n```\r\n3. 
`pip install datasets==1.12.1` and re-run the code again\r\n\r\n## Expected results\r\nSame result as with the previous `datasets` version.\r\n\r\n## Actual results\r\n```bash\r\nReusing dataset librispeech_asr (.\/.cache\/huggingface\/datasets\/librispeech_asr\/clean\/2.1.0\/468ec03677f46a8714ac6b5b64dba02d246a228d92cbbad7f3dc190fa039eab1)\r\nLoading cached processed dataset at .\/.cache\/huggingface\/datasets\/librispeech_asr\/clean\/2.1.0\/468ec03677f46a8714ac6b5b64dba02d246a228d92cbbad7f3dc190fa039eab1\/cache-cd1c29844fdbc87a.arrow\r\nTraceback (most recent call last):\r\n File \".\/repos\/transformers\/src\/transformers\/models\/wav2vec2\/try_dataset.py\", line 5, in \r\n ds = ds.filter(lambda x: x[\"id\"] in ids)\r\n File \".\/envs\/transformers\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 185, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \".\/envs\/transformers\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py\", line 398, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \".\/envs\/transformers\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 2169, in filter\r\n indices = self.map(\r\n File \".\/envs\/transformers\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 1686, in map\r\n return self._map_single(\r\n File \".\/envs\/transformers\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 185, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \".\/envs\/transformers\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py\", line 398, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \".\/envs\/transformers\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 1896, in _map_single\r\n return Dataset.from_file(cache_file_name, info=info, split=self.split)\r\n File \".\/envs\/transformers\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 343, in from_file\r\n return cls(\r\n File \".\/envs\/transformers\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 282, in __init__\r\n self.info.features = self.info.features.reorder_fields_as(inferred_features)\r\n File \".\/envs\/transformers\/lib\/python3.8\/site-packages\/datasets\/features.py\", line 1151, in reorder_fields_as\r\n return Features(recursive_reorder(self, other))\r\n File \".\/envs\/transformers\/lib\/python3.8\/site-packages\/datasets\/features.py\", line 1140, in recursive_reorder\r\n raise ValueError(f\"Keys mismatch: between {source} and {target}\" + stack_position)\r\nValueError: Keys mismatch: between {'indices': Value(dtype='uint64', id=None)} and {'file': Value(dtype='string', id=None), 'text': Value(dtype='string', id=None), 'speaker_id': Value(dtype='int64', id=None), 'chapter_id': Value(dtype='int64', id=None), 'id': Value(dtype='string', id=None)}\r\n\r\nProcess finished with exit code 1\r\n\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-5.11.0-34-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.10\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2943\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2943\/timeline","performed_via_github_app":null} 
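A minimal Python sketch of the cache-cleanup workaround described in issue 2943 above, equivalent to the `rm -rf ~/.cache/huggingface/datasets/librispeech_asr` command it suggests; the path assumes the default cache location and should be adjusted if `HF_DATASETS_CACHE` points elsewhere:

```python
import shutil
from pathlib import Path

# Remove the stale librispeech_asr cache so that `.filter()` is recomputed
# under the newer `datasets` version instead of reloading the incompatible
# cached indices file. Assumes the default Hugging Face cache directory.
cache_dir = Path.home() / ".cache" / "huggingface" / "datasets" / "librispeech_asr"
if cache_dir.exists():
    shutil.rmtree(cache_dir)
```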
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2942","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2942\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2942\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2942\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2942","id":1000309765,"node_id":"PR_kwDODunzps4r7tY6","number":2942,"title":"Add SEDE dataset","user":{"login":"Hazoom","id":13545154,"node_id":"MDQ6VXNlcjEzNTQ1MTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13545154?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Hazoom","html_url":"https:\/\/github.com\/Hazoom","followers_url":"https:\/\/api.github.com\/users\/Hazoom\/followers","following_url":"https:\/\/api.github.com\/users\/Hazoom\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Hazoom\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Hazoom\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Hazoom\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Hazoom\/orgs","repos_url":"https:\/\/api.github.com\/users\/Hazoom\/repos","events_url":"https:\/\/api.github.com\/users\/Hazoom\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Hazoom\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-09-19T13:11:24Z","updated_at":"2021-09-24T10:39:55Z","closed_at":"2021-09-24T10:39:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2942","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2942","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2942.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2942.patch","merged_at":"2021-09-24T10:39:54Z"},"body":"This PR adds the SEDE dataset for the task of realistic Text-to-SQL, following the instructions of how to add a database and a dataset card.\r\n\r\nPlease see our paper for more details: https:\/\/arxiv.org\/abs\/2106.05006","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2942\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2942\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2941","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2941\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2941\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2941\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2941","id":1000000711,"node_id":"I_kwDODunzps47mszH","number":2941,"title":"OSCAR unshuffled_original_ko: 
NonMatchingSplitsSizesError","user":{"login":"ayaka14732","id":68557794,"node_id":"MDQ6VXNlcjY4NTU3Nzk0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68557794?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ayaka14732","html_url":"https:\/\/github.com\/ayaka14732","followers_url":"https:\/\/api.github.com\/users\/ayaka14732\/followers","following_url":"https:\/\/api.github.com\/users\/ayaka14732\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ayaka14732\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ayaka14732\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ayaka14732\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ayaka14732\/orgs","repos_url":"https:\/\/api.github.com\/users\/ayaka14732\/repos","events_url":"https:\/\/api.github.com\/users\/ayaka14732\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ayaka14732\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-18T10:39:13Z","updated_at":"2022-01-19T14:10:07Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nCannot download OSCAR `unshuffled_original_ko` due to `NonMatchingSplitsSizesError`.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\n>>> dataset = datasets.load_dataset('oscar', 'unshuffled_original_ko')\r\nNonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=25292102197, num_examples=7345075, dataset_name='oscar'), 'recorded': SplitInfo(name='train', num_bytes=25284578514, num_examples=7344907, dataset_name='oscar')}]\r\n```\r\n\r\n## Expected results\r\n\r\nLoading is successful.\r\n\r\n## Actual results\r\n\r\nLoading throws above error.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-5.4.0-81-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2941\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2941\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2940","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2940\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2940\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2940\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2940","id":999680796,"node_id":"PR_kwDODunzps4r6EUF","number":2940,"title":"add swedish_medical_ner 
dataset","user":{"login":"bwang482","id":6764450,"node_id":"MDQ6VXNlcjY3NjQ0NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6764450?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bwang482","html_url":"https:\/\/github.com\/bwang482","followers_url":"https:\/\/api.github.com\/users\/bwang482\/followers","following_url":"https:\/\/api.github.com\/users\/bwang482\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bwang482\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bwang482\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bwang482\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bwang482\/orgs","repos_url":"https:\/\/api.github.com\/users\/bwang482\/repos","events_url":"https:\/\/api.github.com\/users\/bwang482\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bwang482\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-17T20:03:05Z","updated_at":"2021-10-05T12:13:34Z","closed_at":"2021-10-05T12:13:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2940","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2940","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2940.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2940.patch","merged_at":"2021-10-05T12:13:33Z"},"body":"Adding the Swedish Medical NER dataset, listed in \"Biomedical Datasets - BigScience Workshop 2021\"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2940\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2940\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2939","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2939\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2939\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2939\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2939","id":999639630,"node_id":"PR_kwDODunzps4r58Gu","number":2939,"title":"MENYO-20k repo has moved, updating 
URL","user":{"login":"cdleong","id":4109253,"node_id":"MDQ6VXNlcjQxMDkyNTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4109253?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cdleong","html_url":"https:\/\/github.com\/cdleong","followers_url":"https:\/\/api.github.com\/users\/cdleong\/followers","following_url":"https:\/\/api.github.com\/users\/cdleong\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cdleong\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cdleong\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cdleong\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cdleong\/orgs","repos_url":"https:\/\/api.github.com\/users\/cdleong\/repos","events_url":"https:\/\/api.github.com\/users\/cdleong\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cdleong\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-17T19:01:54Z","updated_at":"2021-09-21T15:31:37Z","closed_at":"2021-09-21T15:31:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2939","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2939","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2939.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2939.patch","merged_at":"2021-09-21T15:31:36Z"},"body":"Dataset repo moved to https:\/\/github.com\/uds-lsv\/menyo-20k_MT, now editing URL to match.\r\n\r\nhttps:\/\/github.com\/uds-lsv\/menyo-20k_MT\/blob\/master\/data\/train.tsv is the file we're looking for","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2939\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2939\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2938","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2938\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2938\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2938\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2938","id":999552263,"node_id":"PR_kwDODunzps4r5qwa","number":2938,"title":"Take namespace into account in 
caching","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-09-17T16:57:33Z","updated_at":"2021-12-17T10:52:18Z","closed_at":"2021-09-29T13:01:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2938","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2938","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2938.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2938.patch","merged_at":"2021-09-29T13:01:31Z"},"body":"Loading a dataset \"username\/dataset_name\" hosted by a user on the hub used to cache the dataset only taking into account the dataset name, and ignorign the username. 
Because of this, if a user later loads \"dataset_name\" without specifying the username, it would reload the dataset from the cache instead of failing.\r\n\r\nI changed the dataset cache and module cache mechanism to include the username in the name of the cache directory that is used:\r\n\r\n`~\/.cache\/huggingface\/datasets\/username\/dataset_name` for the data\r\n`~\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/username\/dataset_name` for the python files\r\n<\/s>\r\nEDIT: actually using three underscores:\r\n`~\/.cache\/huggingface\/datasets\/username___dataset_name` for the data\r\n`~\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/username___dataset_name` for the python files\r\n\r\nThis PR should fix the issue https:\/\/github.com\/huggingface\/datasets\/issues\/2842\r\n\r\ncc @stas00 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2938\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2938\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2937","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2937\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2937\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2937\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2937","id":999548277,"node_id":"I_kwDODunzps47k-V1","number":2937,"title":"load_dataset using default cache on Windows causes PermissionError: [WinError 5] Access is denied","user":{"login":"daqieq","id":40532020,"node_id":"MDQ6VXNlcjQwNTMyMDIw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40532020?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/daqieq","html_url":"https:\/\/github.com\/daqieq","followers_url":"https:\/\/api.github.com\/users\/daqieq\/followers","following_url":"https:\/\/api.github.com\/users\/daqieq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/daqieq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/daqieq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/daqieq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/daqieq\/orgs","repos_url":"https:\/\/api.github.com\/users\/daqieq\/repos","events_url":"https:\/\/api.github.com\/users\/daqieq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/daqieq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-09-17T16:52:10Z","updated_at":"2022-01-29T03:49:30Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nStandard process to download and load the wiki_bio dataset causes PermissionError in Windows 10 and 11.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nds = load_dataset('wiki_bio')\r\n```\r\n\r\n## 
Expected results\r\nIt is expected that the dataset downloads without any errors.\r\n\r\n## Actual results\r\nPermissionError see trace below:\r\n```\r\nUsing custom data configuration default\r\nDownloading and preparing dataset wiki_bio\/default (download: 318.53 MiB, generated: 736.94 MiB, post-processed: Unknown size, total: 1.03 GiB) to C:\\Users\\username\\.cache\\huggingface\\datasets\\wiki_bio\\default\\1.1.0\\5293ce565954ba965dada626f1e79684e98172d950371d266bf3caaf87e911c9...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"C:\\Users\\username\\.conda\\envs\\hf\\lib\\site-packages\\datasets\\load.py\", line 1112, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"C:\\Users\\username\\.conda\\envs\\hf\\lib\\site-packages\\datasets\\builder.py\", line 644, in download_and_prepare\r\n self._save_info()\r\n File \"C:\\Users\\username\\.conda\\envs\\hf\\lib\\contextlib.py\", line 120, in __exit__\r\n next(self.gen)\r\n File \"C:\\Users\\username\\.conda\\envs\\hf\\lib\\site-packages\\datasets\\builder.py\", line 598, in incomplete_dir\r\n os.rename(tmp_dir, dirname)\r\nPermissionError: [WinError 5] Access is denied: 'C:\\\\Users\\\\username\\\\.cache\\\\huggingface\\\\datasets\\\\wiki_bio\\\\default\\\\1.1.0\\\\5293ce565954ba965dada626f1e79684e98172d950371d266bf3caaf87e911c9.incomplete' -> 'C:\\\\Users\\\\username\\\\.cache\\\\huggingface\\\\datasets\\\\wiki_bio\\\\default\\\\1.1.0\\\\5293ce565954ba965dada626f1e79684e98172d950371d266bf3caaf87e911c9'\r\n```\r\nBy commenting out the os.rename() [L604](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/builder.py#L604) and the shutil.rmtree() [L607](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/builder.py#L607) lines, in my virtual environment, I was able to get the load process to complete, rename the directory manually and then rerun the `load_dataset('wiki_bio')` to get what I needed.\r\n\r\nIt seems that os.rename() in the `incomplete_dir` content manager is the culprit. 
Here's another project [Conan](https:\/\/github.com\/conan-io\/conan\/issues\/6560) with similar issue with os.rename() if it helps debug this issue.\r\n\r\n## Environment info\r\n- `datasets` version: 1.12.1\r\n- Platform: Windows-10-10.0.22449-SP0\r\n- Python version: 3.8.12\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2937\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2937\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2936","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2936\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2936\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2936\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2936","id":999521647,"node_id":"PR_kwDODunzps4r5knb","number":2936,"title":"Check that array is not Float as nan != nan","user":{"login":"Iwontbecreative","id":494951,"node_id":"MDQ6VXNlcjQ5NDk1MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/494951?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Iwontbecreative","html_url":"https:\/\/github.com\/Iwontbecreative","followers_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/followers","following_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/orgs","repos_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/repos","events_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-17T16:16:41Z","updated_at":"2021-09-21T09:39:05Z","closed_at":"2021-09-21T09:39:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2936","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2936","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2936.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2936.patch","merged_at":"2021-09-21T09:39:04Z"},"body":"The Exception wants to check for issues with StructArrays\/ListArrays but catches FloatArrays with value nan as nan != nan.\r\nPass on FloatArrays as we should not raise an Exception for them.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2936\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2936\/timeline","performed_via_github_app":null} 
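A minimal, self-contained sketch of the `nan != nan` pitfall that PR #2936 above works around; it does not reproduce the library's internal StructArray/ListArray check, only the float comparison behaviour that triggers the false positive:

```python
import pyarrow as pa

# NaN never compares equal to itself, so an equality-based consistency check
# on a float column containing NaN reports a mismatch even when the values
# round-trip correctly. This is the behaviour PR #2936 guards against by
# skipping FloatArrays.
nan = float("nan")
print(nan == nan)  # False

arr = pa.array([1.0, nan])
print(arr.to_pylist() == pa.array(arr.to_pylist()).to_pylist())  # False, only because of NaN
```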
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2935","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2935\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2935\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2935\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2935","id":999518469,"node_id":"PR_kwDODunzps4r5j8B","number":2935,"title":"Add Jigsaw unintended Bias","user":{"login":"Iwontbecreative","id":494951,"node_id":"MDQ6VXNlcjQ5NDk1MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/494951?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Iwontbecreative","html_url":"https:\/\/github.com\/Iwontbecreative","followers_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/followers","following_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/orgs","repos_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/repos","events_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Iwontbecreative\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-09-17T16:12:31Z","updated_at":"2021-09-24T10:41:52Z","closed_at":"2021-09-24T10:41:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2935","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2935","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2935.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2935.patch","merged_at":"2021-09-24T10:41:52Z"},"body":"Hi,\r\n\r\nHere's a first attempt at this dataset. Would be great if it could be merged relatively quickly as it is needed for Bigscience-related stuff. 
\r\n\r\nThis requires manual download, and I had some trouble generating dummy_data in this setting, so welcoming feedback there.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2935\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2935\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2934","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2934\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2934\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2934\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2934","id":999477413,"node_id":"I_kwDODunzps47ktCl","number":2934,"title":"to_tf_dataset keeps a reference to the open data somewhere, causing issues on windows","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-17T15:26:53Z","updated_at":"2021-10-13T09:03:23Z","closed_at":"2021-10-13T09:03:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"To reproduce:\r\n```python\r\nimport datasets as ds\r\nimport weakref\r\nimport gc\r\n\r\nd = ds.load_dataset(\"mnist\", split=\"train\")\r\nref = weakref.ref(d._data.table)\r\ntfd = d.to_tf_dataset(\"image\", batch_size=1, shuffle=False, label_cols=\"label\")\r\ndel tfd, d\r\ngc.collect()\r\nassert ref() is None, \"Error: there is at least one reference left\"\r\n```\r\n\r\nThis causes issues because the table holds a reference to an open arrow file that should be closed. 
So on windows it's not possible to delete or move the arrow file afterwards.\r\n\r\nMoreover the CI test of the `to_tf_dataset` method isn't able to clean up the temporary arrow files because of this.\r\n\r\ncc @Rocketknight1 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2934\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2934\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2933","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2933\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2933\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2933\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2933","id":999392566,"node_id":"PR_kwDODunzps4r5MHs","number":2933,"title":"Replace script_version with revision","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-17T14:04:39Z","updated_at":"2021-09-20T09:52:10Z","closed_at":"2021-09-20T09:52:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2933","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2933","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2933.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2933.patch","merged_at":"2021-09-20T09:52:10Z"},"body":"As discussed in https:\/\/github.com\/huggingface\/datasets\/pull\/2718#discussion_r707013278, the parameter name `script_version` is no longer applicable to datasets without loading script (i.e., datasets only with raw data files).\r\n\r\nThis PR replaces the parameter name `script_version` with `revision`.\r\n\r\nThis way, we are also aligned with:\r\n- Transformers: `AutoTokenizer.from_pretrained(..., revision=...)`\r\n- Hub: `HfApi.dataset_info(..., revision=...)`, `HfApi.upload_file(..., 
revision=...)`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2933\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2933\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2932","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2932\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2932\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2932\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2932","id":999317750,"node_id":"I_kwDODunzps47kGD2","number":2932,"title":"Conda build fails","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-09-17T12:49:22Z","updated_at":"2021-09-21T15:31:10Z","closed_at":"2021-09-21T15:31:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nCurrent `datasets` version in conda is 1.9 instead of 1.12.\r\n\r\nThe build of the conda package fails.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2932\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2932\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2931","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2931\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2931\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2931\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2931","id":998326359,"node_id":"PR_kwDODunzps4r1-JH","number":2931,"title":"Fix bug in 
to_tf_dataset","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-16T15:08:03Z","updated_at":"2021-09-16T17:01:38Z","closed_at":"2021-09-16T17:01:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2931","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2931","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2931.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2931.patch","merged_at":"2021-09-16T17:01:37Z"},"body":"Replace `set_format()` to `with_format()` so that we don't alter the original dataset in `to_tf_dataset()`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2931\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2931\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2930","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2930\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2930\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2930\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2930","id":998154311,"node_id":"I_kwDODunzps47fqBH","number":2930,"title":"Mutable columns argument breaks 
set_format","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"assignees":[{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"http
s:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-16T12:27:22Z","updated_at":"2021-09-16T13:50:53Z","closed_at":"2021-09-16T13:50:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIf you pass a mutable list to the `columns` argument of `set_format` and then change the list afterwards, the returned columns also change.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"glue\", \"cola\")\r\n\r\ncolumn_list = [\"idx\", \"label\"]\r\ndataset.set_format(\"python\", columns=column_list)\r\ncolumn_list[1] = \"foo\" # Change the list after we call `set_format`\r\ndataset['train'][:4].keys()\r\n```\r\n\r\n## Expected results\r\n```python\r\ndict_keys(['idx', 'label'])\r\n```\r\n\r\n## Actual results\r\n```python\r\ndict_keys(['idx'])\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2930\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2930\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2929","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2929\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2929\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2929\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2929","id":997960024,"node_id":"PR_kwDODunzps4r015C","number":2929,"title":"Add regression test for null 
Sequence","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-16T08:58:33Z","updated_at":"2021-09-17T08:23:59Z","closed_at":"2021-09-17T08:23:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2929","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2929","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2929.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2929.patch","merged_at":"2021-09-17T08:23:59Z"},"body":"Relates to #2892 and #2900.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2929\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2929\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2928","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2928\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2928\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2928\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2928","id":997941506,"node_id":"PR_kwDODunzps4r0yUb","number":2928,"title":"Update BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-16T08:39:20Z","updated_at":"2021-09-16T12:35:34Z","closed_at":"2021-09-16T12:35:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2928","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2928","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2928.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2928.patch","merged_at":"2021-09-16T12:35:34Z"},"body":"Update BibTeX entry.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2928\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2928\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2927","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2927\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2927\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2927\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2927","id":997654680,"node_id":"I_kwDODunzps47dwCY","number":2927,"title":"Datasets 1.12 dataset.filter TypeError: get_indices_from_mask_function() got an unexpected keyword 
argument","user":{"login":"timothyjlaurent","id":2000204,"node_id":"MDQ6VXNlcjIwMDAyMDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2000204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timothyjlaurent","html_url":"https:\/\/github.com\/timothyjlaurent","followers_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/followers","following_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/orgs","repos_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/repos","events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-09-16T01:14:02Z","updated_at":"2021-09-20T16:23:22Z","closed_at":"2021-09-20T16:23:21Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nUpgrading to 1.12 caused 
`dataset.filter` call to fail with \r\n\r\n> get_indices_from_mask_function() got an unexpected keyword argument valid_rel_labels\r\n\r\n\r\n## Steps to reproduce the bug\r\n```pythondef \r\n\r\nfilter_good_rows(\r\n ex: Dict,\r\n valid_rel_labels: Set[str],\r\n valid_ner_labels: Set[str],\r\n tokenizer: PreTrainedTokenizerFast,\r\n) -> bool:\r\n \"\"\"Get the good rows\"\"\"\r\n encoding = get_encoding_for_text(text=ex[\"text\"], tokenizer=tokenizer)\r\n ex[\"encoding\"] = encoding\r\n for relation in ex[\"relations\"]:\r\n if not is_valid_relation(relation, valid_rel_labels):\r\n return False\r\n for span in ex[\"spans\"]:\r\n if not is_valid_span(span, valid_ner_labels, encoding):\r\n return False\r\n return True\r\n \r\ndef get_dataset(): \r\n loader_path = str(Path(__file__).parent \/ \"prodigy_dataset_builder.py\")\r\n ds = load_dataset(\r\n loader_path,\r\n name=\"prodigy-dataset\",\r\n data_files=sorted(file_paths),\r\n cache_dir=cache_dir,\r\n )[\"train\"]\r\n\r\n valid_ner_labels = set(vocab.ner_category)\r\n valid_relations = set(vocab.relation_types.keys())\r\n ds = ds.filter(\r\n filter_good_rows,\r\n fn_kwargs=dict(\r\n valid_rel_labels=valid_relations,\r\n valid_ner_labels=valid_ner_labels,\r\n tokenizer=vocab.tokenizer,\r\n ),\r\n keep_in_memory=True,\r\n num_proc=num_proc,\r\n )\r\n\r\n```\r\n\r\n`ds` is a `DatasetDict` produced by a jsonl dataset.\r\nThis runs fine on 1.11 but fails on 1.12\r\n\r\n**Stack Trace**\r\n\r\n\r\n\r\n## Expected results\r\n\r\nI expect 1.12 datasets filter to filter the dataset without raising as it does on 1.11\r\n\r\n## Actual results\r\n```\r\ntf_ner_rel_lib\/dataset.py:695: in load_prodigy_arrow_datasets_from_jsonl\r\n ds = ds.filter(\r\n..\/..\/..\/..\/.pyenv\/versions\/tf_ner_rel_lib\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py:185: in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n..\/..\/..\/..\/.pyenv\/versions\/tf_ner_rel_lib\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py:398: in wrapper\r\n out = func(self, *args, **kwargs)\r\n..\/..\/..\/..\/.pyenv\/versions\/tf_ner_rel_lib\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py:2169: in filter\r\n indices = self.map(\r\n..\/..\/..\/..\/.pyenv\/versions\/tf_ner_rel_lib\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py:1686: in map\r\n return self._map_single(\r\n..\/..\/..\/..\/.pyenv\/versions\/tf_ner_rel_lib\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py:185: in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n..\/..\/..\/..\/.pyenv\/versions\/tf_ner_rel_lib\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py:398: in wrapper\r\n out = func(self, *args, **kwargs)\r\n..\/..\/..\/..\/.pyenv\/versions\/tf_ner_rel_lib\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py:2048: in _map_single\r\n batch = apply_function_on_filtered_inputs(\r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \r\n\r\ninputs = {'_input_hash': [2108817714, 1477695082, -1021597032, 2130671338, -1260483858, -1203431639, ...], '_task_hash': [18070...ons', 'relations', 'relations', ...], 'answer': ['accept', 'accept', 'accept', 'accept', 'accept', 'accept', ...], ...}\r\nindices = [0, 1, 2, 3, 4, 5, ...], check_same_num_examples = False, offset = 0\r\n\r\n def apply_function_on_filtered_inputs(inputs, indices, check_same_num_examples=False, offset=0):\r\n \"\"\"Utility to apply the function on a selection of columns.\"\"\"\r\n nonlocal 
update_data\r\n fn_args = [inputs] if input_columns is None else [inputs[col] for col in input_columns]\r\n if offset == 0:\r\n effective_indices = indices\r\n else:\r\n effective_indices = [i + offset for i in indices] if isinstance(indices, list) else indices + offset\r\n processed_inputs = (\r\n> function(*fn_args, effective_indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n )\r\nE TypeError: get_indices_from_mask_function() got an unexpected keyword argument 'valid_rel_labels'\r\n\r\n..\/..\/..\/..\/.pyenv\/versions\/tf_ner_rel_lib\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py:1939: TypeError\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Mac\r\n- Python version: 3.8.9\r\n- PyArrow version: pyarrow==5.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2927\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2927\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2926","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2926\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2926\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2926\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2926","id":997463277,"node_id":"I_kwDODunzps47dBTt","number":2926,"title":"Error when downloading datasets to non-traditional cache directories","user":{"login":"dar-tau","id":45885627,"node_id":"MDQ6VXNlcjQ1ODg1NjI3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/45885627?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dar-tau","html_url":"https:\/\/github.com\/dar-tau","followers_url":"https:\/\/api.github.com\/users\/dar-tau\/followers","following_url":"https:\/\/api.github.com\/users\/dar-tau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dar-tau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dar-tau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dar-tau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dar-tau\/orgs","repos_url":"https:\/\/api.github.com\/users\/dar-tau\/repos","events_url":"https:\/\/api.github.com\/users\/dar-tau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dar-tau\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-15T19:59:46Z","updated_at":"2021-11-24T21:42:31Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen the cache directory is linked (soft link) to a directory on a NetApp device, the download fails. 
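Editor's note on the `dataset.filter` report above (issue 2927): a minimal sketch of the failing pattern, using a toy dataset and filter function that are not the reporter's actual code. On `datasets` 1.12.x this combination of `filter` and `fn_kwargs` was reported to raise the `get_indices_from_mask_function()` TypeError shown in the traceback; on 1.11 and on releases after the fix it filters as expected.

```python
from datasets import Dataset

ds = Dataset.from_dict({"text": ["good", "bad", "good"]})

def keep_row(example, allowed):
    # Keep only rows whose text is in the allowed set.
    return example["text"] in allowed

# Reported on datasets 1.12.x:
#   TypeError: get_indices_from_mask_function() got an unexpected keyword argument 'allowed'
filtered = ds.filter(keep_row, fn_kwargs={"allowed": {"good"}})
print(filtered["text"])  # ['good', 'good'] on versions where fn_kwargs is forwarded correctly
```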
\r\n\r\n## Steps to reproduce the bug\r\n```bash\r\nln -s \/path\/to\/netapp\/.cache ~\/.cache\r\n```\r\n\r\n```python\r\nload_dataset(\"imdb\")\r\n```\r\n\r\n## Expected results\r\nSuccessfully loading IMDB dataset\r\n\r\n## Actual results\r\n```\r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=33432835, \r\nnum_examples=25000, dataset_name='imdb'), 'recorded': SplitInfo(name='train', num_bytes=0, num_examples=0,\r\n dataset_name='imdb')}, {'expected': SplitInfo(name='test', num_bytes=32650697, num_examples=25000, dataset_name='imdb'),\r\n 'recorded': SplitInfo(name='test', num_bytes=659932, num_examples=503, dataset_name='imdb')}, {'expected':\r\n SplitInfo(name='unsupervised', num_bytes=67106814, num_examples=50000, dataset_name='imdb'), 'recorded':\r\n SplitInfo(name='unsupervised', num_bytes=0, num_examples=0, dataset_name='imdb')}]\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.1.2\r\n- Platform: Ubuntu \r\n- Python version: 3.8\r\n\r\n## Extra notes\r\nStranger yet, trying to debug the phenomenon, I found the range of results to vary a lot without clear direction:\r\n - With `cache_dir=\"\/path\/to\/netapp\/.cache\"` the same thing happens.\r\n - However, when linking `~\/netapp\/` to `\/path\/to\/netapp` *and* setting `cache_dir=\"~\/netapp\/.cache\/huggingface\/datasets\"` - it does work\r\n - On the other hand, when linking `~\/.cache` to `~\/netapp\/.cache` without using `cache_dir`, it doesn't work anymore.\r\n\r\nWhile I could test it only for a NetApp device, it might have to do with any other mounted FS.\r\n\r\nThanks :)\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2926\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2926\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2925","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2925\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2925\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2925\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2925","id":997407034,"node_id":"PR_kwDODunzps4rzJ9s","number":2925,"title":"Add tutorial for no-code dataset 
upload","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-09-15T18:54:42Z","updated_at":"2021-09-27T17:51:55Z","closed_at":"2021-09-27T17:51:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2925","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2925","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2925.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2925.patch","merged_at":"2021-09-27T17:51:55Z"},"body":"This PR is for a tutorial for uploading a dataset to the Hub. It relies on the Hub UI elements to upload a dataset, introduces the online tagging tool for creating tags, and the Dataset card template to get a head start on filling it out. 
The addition of this tutorial should make it easier for beginners to upload a dataset without accessing the terminal or knowing Git.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2925\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2925\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2924","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2924\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2924\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2924\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2924","id":997378113,"node_id":"I_kwDODunzps47cshB","number":2924,"title":"\"File name too long\" error for file locks","user":{"login":"gar1t","id":184949,"node_id":"MDQ6VXNlcjE4NDk0OQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/184949?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gar1t","html_url":"https:\/\/github.com\/gar1t","followers_url":"https:\/\/api.github.com\/users\/gar1t\/followers","following_url":"https:\/\/api.github.com\/users\/gar1t\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gar1t\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gar1t\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gar1t\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gar1t\/orgs","repos_url":"https:\/\/api.github.com\/users\/gar1t\/repos","events_url":"https:\/\/api.github.com\/users\/gar1t\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gar1t\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":9,"created_at":"2021-09-15T18:16:50Z","updated_at":"2021-10-29T09:42:24Z","closed_at":"2021-10-29T09:42:24Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nGetting the following error when calling `load_dataset(\"gar1t\/test\")`:\r\n\r\n```\r\nOSError: [Errno 36] File name too long: '\/.cache\/huggingface\/datasets\/_home_garrett_.cache_huggingface_datasets_csv_test-7c856aea083a7043_0.0.0_9144e0a4e8435090117cea53e6c7537173ef2304525df4a077c435d8ee7828ff.incomplete.lock'\r\n```\r\n\r\n## Steps to reproduce the bug\r\n\r\nWhere the user cache dir (e.g. `~\/.cache`) is on a file system that limits filenames to 255 chars (e.g. 
ext4):\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset(\"gar1t\/test\")\r\n```\r\n\r\n## Expected results\r\n\r\nExpect the function to return without an error.\r\n\r\n## Actual results\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/lib\/python3.9\/site-packages\/datasets\/load.py\", line 1112, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 644, in download_and_prepare\r\n self._save_info()\r\n File \"\/lib\/python3.9\/site-packages\/datasets\/builder.py\", line 765, in _save_info\r\n with FileLock(lock_path):\r\n File \"\/lib\/python3.9\/site-packages\/datasets\/utils\/filelock.py\", line 323, in __enter__\r\n self.acquire()\r\n File \"\/lib\/python3.9\/site-packages\/datasets\/utils\/filelock.py\", line 272, in acquire\r\n self._acquire()\r\n File \"\/lib\/python3.9\/site-packages\/datasets\/utils\/filelock.py\", line 403, in _acquire\r\n fd = os.open(self._lock_file, open_mode)\r\nOSError: [Errno 36] File name too long: '\/.cache\/huggingface\/datasets\/_home_garrett_.cache_huggingface_datasets_csv_test-7c856aea083a7043_0.0.0_9144e0a4e8435090117cea53e6c7537173ef2304525df4a077c435d8ee7828ff.incomplete.lock'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1\r\n- Platform: Linux-5.11.0-27-generic-x86_64-with-glibc2.31\r\n- Python version: 3.9.7\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2924\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":1,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2924\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2923","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2923\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2923\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2923\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2923","id":997351590,"node_id":"I_kwDODunzps47cmCm","number":2923,"title":"Loading an autonlp dataset raises in normal mode but not in streaming 
mode","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-15T17:44:38Z","updated_at":"2021-10-22T09:36:09Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nThe same dataset (from autonlp) raises an error in normal mode, but does not raise in streaming mode\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nload_dataset(\"severo\/autonlp-data-sentiment_detection-3c8bcd36\", split=\"train\", streaming=False)\r\n## raises an error\r\n\r\nload_dataset(\"severo\/autonlp-data-sentiment_detection-3c8bcd36\", split=\"train\", streaming=True)\r\n## does not raise an error\r\n```\r\n\r\n## Expected results\r\n\r\nBoth calls should raise the same error\r\n\r\n## Actual results\r\n\r\nCall with 
streaming=False:\r\n\r\n```\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 5825.42it\/s]\r\nUsing custom data configuration autonlp-data-sentiment_detection-3c8bcd36-fe30267462d1d42b\r\nDownloading and preparing dataset json\/autonlp-data-sentiment_detection-3c8bcd36 to \/home\/slesage\/.cache\/huggingface\/datasets\/json\/autonlp-data-sentiment_detection-3c8bcd36-fe30267462d1d42b\/0.0.0\/d75ead8d5cfcbe67495df0f89bd262f0023257fbbbd94a730313295f3d756d50...\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 5\/5 [00:00<00:00, 
15923.71it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 5\/5 [00:00<00:00, 3346.88it\/s]\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1112, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 636, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 726, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 1187, in _prepare_split\r\n writer.write_table(table)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py\", line 418, in write_table\r\n pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py\", line 418, in \r\n pa_table = pa.Table.from_arrays([pa_table[name] for name in self._schema.names], schema=self._schema)\r\n File \"pyarrow\/table.pxi\", line 1249, in pyarrow.lib.Table.__getitem__\r\n File \"pyarrow\/table.pxi\", line 1825, in pyarrow.lib.Table.column\r\n File \"pyarrow\/table.pxi\", line 1800, in pyarrow.lib.Table._ensure_integer_index\r\nKeyError: 'Field \"splits\" does not exist in table schema'\r\n```\r\n\r\nCall with 
`streaming=False`:\r\n\r\n```\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 6000.43it\/s]\r\nUsing custom data configuration autonlp-data-sentiment_detection-3c8bcd36-fe30267462d1d42b\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 5\/5 [00:00<00:00, 
46916.15it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 5\/5 [00:00<00:00, 148734.18it\/s]\r\n```\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.1.dev0\r\n- Platform: Linux-5.11.0-1017-aws-x86_64-with-glibc2.29\r\n- Python version: 3.8.11\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2923\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2923\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2922","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2922\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2922\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2922\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2922","id":997332662,"node_id":"PR_kwDODunzps4ry6-s","number":2922,"title":"Fix conversion of multidim arrays in list to 
arrow","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-15T17:21:36Z","updated_at":"2021-09-15T17:22:52Z","closed_at":"2021-09-15T17:21:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2922","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2922","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2922.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2922.patch","merged_at":"2021-09-15T17:21:45Z"},"body":"Arrow only supports 1-dim arrays. Previously we were converting all the numpy arrays to python list before instantiating arrow arrays to workaround this limitation.\r\nHowever in #2361 we started to keep numpy arrays in order to keep their dtypes.\r\nIt works when we pass any multi-dim numpy array (the conversion to arrow has been added on our side), but not for lists of multi-dim numpy arrays.\r\n\r\nIn this PR I added two strategies:\r\n- one that takes a list of multi-dim numpy arrays on returns an arrow array in an optimized way (more common case)\r\n- one that takes a list of possibly very nested data (lists, dicts, tuples) containing multi-dim arrays. This one is less optimized since it converts all the multi-dim numpy arrays into lists of 1-d arrays for compatibility with arrow. 
This strategy is simpler that just trying to create the arrow array from a possibly very nested data structure, but in the future we can improve it if needed.\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2921","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2922\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2922\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2921","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2921\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2921\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2921\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2921","id":997325424,"node_id":"I_kwDODunzps47cfpw","number":2921,"title":"Using a list of multi-dim numpy arrays raises an error \"can only convert 1-dimensional array values\"","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-15T17:12:11Z","updated_at":"2021-09-15T17:21:45Z","closed_at":"2021-09-15T17:21:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"This error has been introduced in https:\/\/github.com\/huggingface\/datasets\/pull\/2361\r\n\r\nTo reproduce:\r\n```python\r\nimport numpy as np\r\nfrom datasets import Dataset\r\n\r\nd = Dataset.from_dict({\"a\": [np.zeros((2, 2))]})\r\n```\r\nraises\r\n```python\r\nTraceback (most recent call last):\r\n File \"playground\/ttest.py\", line 5, in \r\n d = Dataset.from_dict({\"a\": [np.zeros((2, 2))]}).with_format(\"torch\")\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/arrow_dataset.py\", line 458, in from_dict\r\n pa_table = InMemoryTable.from_pydict(mapping=mapping)\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/table.py\", line 365, in from_pydict\r\n return cls(pa.Table.from_pydict(*args, **kwargs))\r\n File \"pyarrow\/table.pxi\", line 1639, in pyarrow.lib.Table.from_pydict\r\n File \"pyarrow\/array.pxi\", line 332, in pyarrow.lib.asarray\r\n File \"pyarrow\/array.pxi\", line 223, in pyarrow.lib.array\r\n File \"pyarrow\/array.pxi\", line 110, in 
pyarrow.lib._handle_arrow_array_protocol\r\n File \"\/Users\/quentinlhoest\/Desktop\/hf\/nlp\/src\/datasets\/arrow_writer.py\", line 107, in __arrow_array__\r\n out = pa.array(self.data, type=type)\r\n File \"pyarrow\/array.pxi\", line 306, in pyarrow.lib.array\r\n File \"pyarrow\/array.pxi\", line 39, in pyarrow.lib._sequence_to_array\r\n File \"pyarrow\/error.pxi\", line 143, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 99, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Can only convert 1-dimensional array values","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2921\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2921\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2920","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2920\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2920\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2920\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2920","id":997323014,"node_id":"PR_kwDODunzps4ry4_u","number":2920,"title":"Fix unwanted tqdm bar when accessing examples","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-15T17:09:11Z","updated_at":"2021-09-15T17:18:24Z","closed_at":"2021-09-15T17:18:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2920","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2920","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2920.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2920.patch","merged_at":"2021-09-15T17:18:23Z"},"body":"A change in #2814 added bad progress bars in `map_nested`. 
Now they're disabled by default\r\n\r\nFix #2919 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2920\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2920\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2919","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2919\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2919\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2919\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2919","id":997127487,"node_id":"I_kwDODunzps47bvU_","number":2919,"title":"Unwanted progress bars when accessing examples","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subs
criptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-15T14:05:10Z","updated_at":"2021-09-15T17:21:49Z","closed_at":"2021-09-15T17:18:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When accessing examples from a dataset formatted for pytorch, some progress bars appear when accessing examples:\r\n```python\r\nIn [1]: import datasets as ds \r\n\r\nIn [2]: d = ds.Dataset.from_dict({\"a\": [0, 1, 2]}).with_format(\"torch\") \r\n\r\nIn [3]: d[0] \r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 3172.70it\/s]\r\nOut[3]: {'a': tensor(0)}\r\n```\r\n\r\nThis is because the pytorch formatter calls `map_nested` that uses progress bars\r\n\r\ncc @sgugger ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2919\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2919\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2918","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2918\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2918\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2918\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2918","id":997063347,"node_id":"I_kwDODunzps47bfqz","number":2918,"title":"`Can not decode content-encoding: gzip` when loading `scitldr` dataset with streaming","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"},{"id":3287858981,"node_id":"MDU6TGFiZWwzMjg3ODU4OTgx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/streaming","name":"streaming","color":"fef2c0","default":false,"description":""}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-09-15T13:06:07Z","updated_at":"2021-12-01T08:15:00Z","closed_at":"2021-12-01T08:15:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nTrying to load the `\"FullText\"` config of the `\"scitldr\"` dataset with `streaming=True` raises an error from `aiohttp`:\r\n```python\r\nClientPayloadError: 400, message='Can not decode content-encoding: gzip'\r\n```\r\n\r\ncc @lhoestq \r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\niter_dset = iter(\r\n load_dataset(\"scitldr\", name=\"FullText\", split=\"test\", streaming=True)\r\n)\r\n\r\nnext(iter_dset)\r\n```\r\n\r\n## Expected results\r\nReturns the first sample of the dataset\r\n\r\n## Actual results\r\nCalling `__next__` crashes with the following Traceback:\r\n\r\n```python\r\n----> 1 next(dset_iter)\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\datasets\\iterable_dataset.py in __iter__(self)\r\n 339\r\n 340 def __iter__(self):\r\n--> 341 for key, example in self._iter():\r\n 342 if self.features:\r\n 343 # we encode the example 
for ClassLabel feature types for example\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\datasets\\iterable_dataset.py in _iter(self)\r\n 336 else:\r\n 337 ex_iterable = self._ex_iterable\r\n--> 338 yield from ex_iterable\r\n 339\r\n 340 def __iter__(self):\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\datasets\\iterable_dataset.py in __iter__(self)\r\n 76\r\n 77 def __iter__(self):\r\n---> 78 for key, example in self.generate_examples_fn(**self.kwargs):\r\n 79 yield key, example\r\n 80\r\n\r\n~\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\scitldr\\72d6e2195786c57e1d343066fb2cc4f93ea39c5e381e53e6ae7c44bbfd1f05ef\\scitldr.py in _generate_examples(self, filepath, split)\r\n 162\r\n 163 with open(filepath, encoding=\"utf-8\") as f:\r\n--> 164 for id_, row in enumerate(f):\r\n 165 data = json.loads(row)\r\n 166 if self.config.name == \"AIC\":\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\fsspec\\implementations\\http.py in read(self, length)\r\n 496 else:\r\n 497 length = min(self.size - self.loc, length)\r\n--> 498 return super().read(length)\r\n 499\r\n 500 async def async_fetch_all(self):\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\fsspec\\spec.py in read(self, length)\r\n 1481 # don't even bother calling fetch\r\n 1482 return b\"\"\r\n-> 1483 out = self.cache._fetch(self.loc, self.loc + length)\r\n 1484 self.loc += len(out)\r\n 1485 return out\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\fsspec\\caching.py in _fetch(self, start, end)\r\n 378 elif start < self.start:\r\n 379 if self.end - end > self.blocksize:\r\n--> 380 self.cache = self.fetcher(start, bend)\r\n 381 self.start = start\r\n 382 else:\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\fsspec\\asyn.py in wrapper(*args, **kwargs)\r\n 86 def wrapper(*args, **kwargs):\r\n 87 self = obj or args[0]\r\n---> 88 return sync(self.loop, func, *args, **kwargs)\r\n 89\r\n 90 return wrapper\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\fsspec\\asyn.py in sync(loop, func, timeout, *args, **kwargs)\r\n 67 raise FSTimeoutError\r\n 68 if isinstance(result[0], BaseException):\r\n---> 69 raise result[0]\r\n 70 return result[0]\r\n 71\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\fsspec\\asyn.py in _runner(event, coro, result, timeout)\r\n 23 coro = asyncio.wait_for(coro, timeout=timeout)\r\n 24 try:\r\n---> 25 result[0] = await coro\r\n 26 except Exception as ex:\r\n 27 result[0] = ex\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\fsspec\\implementations\\http.py in async_fetch_range(self, start, end)\r\n 538 if r.status == 206:\r\n 539 # partial content, as expected\r\n--> 540 out = await r.read()\r\n 541 elif \"Content-Length\" in r.headers:\r\n 542 cl = int(r.headers[\"Content-Length\"])\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\aiohttp\\client_reqrep.py in read(self)\r\n 1030 if self._body is None:\r\n 1031 try:\r\n-> 1032 self._body = await self.content.read()\r\n 1033 for trace in self._traces:\r\n 1034 await trace.send_response_chunk_received(\r\n\r\n~\\miniconda3\\envs\\datasets\\lib\\site-packages\\aiohttp\\streams.py in read(self, n)\r\n 342 async def read(self, n: int = -1) -> bytes:\r\n 343 if self._exception is not None:\r\n--> 344 raise self._exception\r\n 345\r\n 346 # migration problem; with DataQueue you have to catch\r\n\r\nClientPayloadError: 400, message='Can not decode content-encoding: gzip'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.0\r\n- Platform: 
Windows-10-10.0.19041-SP0\r\n- Python version: 3.8.5\r\n- PyArrow version: 2.0.0\r\n- aiohttp version: 3.7.4.post0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2918\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2918\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2917","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2917\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2917\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2917\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2917","id":997041658,"node_id":"I_kwDODunzps47baX6","number":2917,"title":"windows download abnormal","user":{"login":"wei1826676931","id":52347799,"node_id":"MDQ6VXNlcjUyMzQ3Nzk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52347799?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/wei1826676931","html_url":"https:\/\/github.com\/wei1826676931","followers_url":"https:\/\/api.github.com\/users\/wei1826676931\/followers","following_url":"https:\/\/api.github.com\/users\/wei1826676931\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/wei1826676931\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/wei1826676931\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/wei1826676931\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/wei1826676931\/orgs","repos_url":"https:\/\/api.github.com\/users\/wei1826676931\/repos","events_url":"https:\/\/api.github.com\/users\/wei1826676931\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/wei1826676931\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-09-15T12:45:35Z","updated_at":"2021-09-16T17:17:48Z","closed_at":"2021-09-16T17:17:48Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe script clearly exists (accessible from the browser), but the script download fails on windows. Then I tried it again and it can be downloaded normally on linux. 
why??\r\n## Steps to reproduce the bug\r\n```python3.7 + windows\r\n![image](https:\/\/user-images.githubusercontent.com\/52347799\/133436174-4303f847-55d5-434f-a749-08da3bb9b654.png)\r\n\r\n\r\n# Sample code to reproduce the bug\r\n```\r\n\r\n## Expected results\r\nIt can be downloaded normally.\r\n\r\n## Actual results\r\nit cann't\r\n\r\n## Environment info\r\n\r\n- `datasets` version:1.11.0\r\n- Platform:windows\r\n- Python version:3.7\r\n- PyArrow version:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2917\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2917\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2916","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2916\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2916\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2916\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2916","id":997003661,"node_id":"PR_kwDODunzps4rx5ua","number":2916,"title":"Add OpenAI's pass@k code evaluation metric","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-09-15T12:05:43Z","updated_at":"2021-11-12T14:19:51Z","closed_at":"2021-11-12T14:19:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2916","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2916","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2916.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2916.patch","merged_at":"2021-11-12T14:19:50Z"},"body":"This PR introduces the `code_eval` metric which implements [OpenAI's code evaluation harness](https:\/\/github.com\/openai\/human-eval) introduced in the [Codex paper](https:\/\/arxiv.org\/abs\/2107.03374). 
It is heavily based on the original implementation and just adapts the interface to follow the `predictions`\/`references` convention.\r\n\r\nThe addition of this metric should enable the evaluation against the code evaluation datasets added in #2897 and #2893.\r\n\r\nA few open questions:\r\n\r\n- The implementation makes heavy use of multiprocessing which this PR does not touch. Is this conflicting with multiprocessing natively integrated in `datasets`?\r\n- This metric executes generated Python code and as such it poses dangers of executing malicious code. OpenAI addresses this issue by 1) commenting the `exec` call in the code so the user has to actively uncomment it and read the warning and 2) suggests using a sandbox environment (gVisor container). Should we add a similar safeguard? E.g. a prompt that needs to be answered when initialising the metric? Or at least a warning message?\r\n- Naming: the implementation sticks to the `predictions`\/`references` naming, however, the references are not reference solutions but unittest to test the solution. While reference solutions are also available they are not used. Should the naming be adapted?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2916\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2916\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2915","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2915\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2915\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2915\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2915","id":996870071,"node_id":"PR_kwDODunzps4rxfWb","number":2915,"title":"Fix fsspec AbstractFileSystem 
access","user":{"login":"pierre-godard","id":3969168,"node_id":"MDQ6VXNlcjM5NjkxNjg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3969168?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pierre-godard","html_url":"https:\/\/github.com\/pierre-godard","followers_url":"https:\/\/api.github.com\/users\/pierre-godard\/followers","following_url":"https:\/\/api.github.com\/users\/pierre-godard\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pierre-godard\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pierre-godard\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pierre-godard\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pierre-godard\/orgs","repos_url":"https:\/\/api.github.com\/users\/pierre-godard\/repos","events_url":"https:\/\/api.github.com\/users\/pierre-godard\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pierre-godard\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-15T09:39:20Z","updated_at":"2021-09-15T11:35:24Z","closed_at":"2021-09-15T11:35:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2915","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2915","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2915.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2915.patch","merged_at":"2021-09-15T11:35:24Z"},"body":"This addresses the issue from #2914 by changing the way fsspec's AbstractFileSystem is accessed.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2915\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2915\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2914","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2914\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2914\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2914\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2914","id":996770168,"node_id":"I_kwDODunzps47aYF4","number":2914,"title":"Having a dependency defining fsspec entrypoint raises an AttributeError when importing 
datasets","user":{"login":"pierre-godard","id":3969168,"node_id":"MDQ6VXNlcjM5NjkxNjg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3969168?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pierre-godard","html_url":"https:\/\/github.com\/pierre-godard","followers_url":"https:\/\/api.github.com\/users\/pierre-godard\/followers","following_url":"https:\/\/api.github.com\/users\/pierre-godard\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pierre-godard\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pierre-godard\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pierre-godard\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pierre-godard\/orgs","repos_url":"https:\/\/api.github.com\/users\/pierre-godard\/repos","events_url":"https:\/\/api.github.com\/users\/pierre-godard\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pierre-godard\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-15T07:54:06Z","updated_at":"2021-09-15T16:49:17Z","closed_at":"2021-09-15T16:49:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIn one of my project, I defined a custom fsspec filesystem with an entrypoint.\r\nMy guess is that by doing so, a variable named `spec` is created in the module `fsspec` (created by entering a for loop as there are entrypoints defined, see the loop in question [here](https:\/\/github.com\/intake\/filesystem_spec\/blob\/0589358d8a029ed6b60d031018f52be2eb721291\/fsspec\/__init__.py#L55)).\r\nSo that `fsspec.spec`, that was previously referring to the `spec` submodule, is now referring to that `spec` variable.\r\nThis make the import of datasets failing as it is using that `fsspec.spec`.\r\n\r\n## Steps to reproduce the bug\r\nI could reproduce the bug with a dummy poetry project.\r\n\r\nHere is the pyproject.toml:\r\n```toml\r\n[tool.poetry]\r\nname = \"debug-datasets\"\r\nversion = \"0.1.0\"\r\ndescription = \"\"\r\nauthors = [\"Pierre Godard\"]\r\n\r\n[tool.poetry.dependencies]\r\npython = \"^3.8\"\r\ndatasets = \"^1.11.0\"\r\n\r\n[tool.poetry.dev-dependencies]\r\n\r\n[build-system]\r\nrequires = [\"poetry-core>=1.0.0\"]\r\nbuild-backend = \"poetry.core.masonry.api\"\r\n\r\n[tool.poetry.plugins.\"fsspec.specs\"]\r\n\"file2\" = \"fsspec.implementations.local.LocalFileSystem\"\r\n```\r\n\r\nThe only other file being a `debug_datasets\/__init__.py` empty file.\r\n\r\nThe overall structure of the project is as follows:\r\n```\r\n.\r\n\u251c\u2500\u2500 pyproject.toml\r\n\u2514\u2500\u2500 debug_datasets\r\n \u2514\u2500\u2500 __init__.py\r\n```\r\n\r\nThen, within the project folder run:\r\n\r\n```\r\npoetry install\r\npoetry run python\r\n```\r\n\r\nAnd in the python interpreter, try to import `datasets`:\r\n\r\n```\r\nimport datasets\r\n```\r\n\r\n## Expected results\r\nThe import should run successfully.\r\n\r\n## Actual results\r\n\r\nHere is the trace of the error I get:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File 
\"\/home\/godarpi\/.cache\/pypoetry\/virtualenvs\/debug-datasets-JuFzTKL--py3.8\/lib\/python3.8\/site-packages\/datasets\/__init__.py\", line 33, in \r\n from .arrow_dataset import Dataset, concatenate_datasets\r\n File \"\/home\/godarpi\/.cache\/pypoetry\/virtualenvs\/debug-datasets-JuFzTKL--py3.8\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 48, in \r\n from .filesystems import extract_path_from_uri, is_remote_filesystem\r\n File \"\/home\/godarpi\/.cache\/pypoetry\/virtualenvs\/debug-datasets-JuFzTKL--py3.8\/lib\/python3.8\/site-packages\/datasets\/filesystems\/__init__.py\", line 30, in \r\n def is_remote_filesystem(fs: fsspec.spec.AbstractFileSystem) -> bool:\r\nAttributeError: 'EntryPoint' object has no attribute 'AbstractFileSystem'\r\n```\r\n\r\n## Suggested fix\r\n\r\n`datasets\/filesystems\/__init__.py`, line 30, replace:\r\n```\r\n def is_remote_filesystem(fs: fsspec.spec.AbstractFileSystem) -> bool:\r\n```\r\nby:\r\n```\r\n def is_remote_filesystem(fs: fsspec.AbstractFileSystem) -> bool:\r\n```\r\n\r\nI will come up with a PR soon if this effectively solves the issue.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: WSL2 (Ubuntu 20.04.1 LTS)\r\n- Python version: 3.8.5\r\n- PyArrow version: 5.0.0\r\n- `fsspec` version: 2021.8.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2914\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2914\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2913","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2913\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2913\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2913\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2913","id":996436368,"node_id":"I_kwDODunzps47ZGmQ","number":2913,"title":"timit_asr dataset only includes one text phrase","user":{"login":"margotwagner","id":39107794,"node_id":"MDQ6VXNlcjM5MTA3Nzk0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39107794?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/margotwagner","html_url":"https:\/\/github.com\/margotwagner","followers_url":"https:\/\/api.github.com\/users\/margotwagner\/followers","following_url":"https:\/\/api.github.com\/users\/margotwagner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/margotwagner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/margotwagner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/margotwagner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/margotwagner\/orgs","repos_url":"https:\/\/api.github.com\/users\/margotwagner\/repos","events_url":"https:\/\/api.github.com\/users\/margotwagner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/margotwagner\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-14T21:06:07Z","updated_at":"2021-09-15T08:05:19Z","closed_at":"2021-09-15T08:05:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe dataset 'timit_asr' only includes one text phrase. It only includes the transcription \"Would such an act of refusal be useful?\" multiple times rather than different phrases.\r\n\r\n## Steps to reproduce the bug\r\nNote: I am following the tutorial https:\/\/huggingface.co\/blog\/fine-tune-wav2vec2-english\r\n\r\n1. Install the dataset and other packages\r\n```python\r\n!pip install datasets>=1.5.0\r\n!pip install transformers==4.4.0\r\n!pip install soundfile\r\n!pip install jiwer\r\n```\r\n2. Load the dataset\r\n```python\r\nfrom datasets import load_dataset, load_metric\r\n\r\ntimit = load_dataset(\"timit_asr\")\r\n```\r\n3. Remove columns that we don't want\r\n```python\r\ntimit = timit.remove_columns([\"phonetic_detail\", \"word_detail\", \"dialect_region\", \"id\", \"sentence_type\", \"speaker_id\"])\r\n```\r\n4. Write a short function to display some random samples of the dataset.\r\n```python\r\nfrom datasets import ClassLabel\r\nimport random\r\nimport pandas as pd\r\nfrom IPython.display import display, HTML\r\n\r\ndef show_random_elements(dataset, num_examples=10):\r\n assert num_examples <= len(dataset), \"Can't pick more elements than there are in the dataset.\"\r\n picks = []\r\n for _ in range(num_examples):\r\n pick = random.randint(0, len(dataset)-1)\r\n while pick in picks:\r\n pick = random.randint(0, len(dataset)-1)\r\n picks.append(pick)\r\n \r\n df = pd.DataFrame(dataset[picks])\r\n display(HTML(df.to_html()))\r\n\r\nshow_random_elements(timit[\"train\"].remove_columns([\"file\"]))\r\n```\r\n\r\n## Expected results\r\n10 random different transcription phrases.\r\n\r\n## Actual results\r\n10 of the same transcription phrase \"Would such an act of refusal be useful?\"\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.4.1\r\n- Platform: macOS-10.15.7-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: not listed\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2913\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2913\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2912","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2912\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2912\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2912\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2912","id":996256005,"node_id":"PR_kwDODunzps4rvhgp","number":2912,"title":"Update link to Blog in docs 
footer","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-14T17:23:14Z","updated_at":"2021-09-15T07:59:23Z","closed_at":"2021-09-15T07:59:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2912","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2912","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2912.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2912.patch","merged_at":"2021-09-15T07:59:23Z"},"body":"Update link.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2912\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2912\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2911","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2911\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2911\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2911\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2911","id":996202598,"node_id":"PR_kwDODunzps4rvW7Y","number":2911,"title":"Fix exception 
chaining","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-14T16:19:29Z","updated_at":"2021-09-16T15:04:44Z","closed_at":"2021-09-16T15:04:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2911","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2911","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2911.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2911.patch","merged_at":"2021-09-16T15:04:44Z"},"body":"Fix exception chaining to avoid tracebacks with message: `During handling of the above exception, another exception occurred:`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2911\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2911\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2910","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2910\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2910\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2910\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2910","id":996149632,"node_id":"PR_kwDODunzps4rvL9N","number":2910,"title":"feat: \ud83c\udfb8 pass additional arguments to get private configs + 
info","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-14T15:24:19Z","updated_at":"2021-09-15T16:19:09Z","closed_at":"2021-09-15T16:19:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2910","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2910","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2910.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2910.patch","merged_at":null},"body":"`use_auth_token` can now be passed to the functions to get the configs\r\nor infos of private datasets on the hub","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2910\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2910\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2909","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2909\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2909\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2909\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2909","id":996002180,"node_id":"PR_kwDODunzps4rutdo","number":2909,"title":"fix anli 
splits","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-14T13:10:35Z","updated_at":"2021-10-13T11:27:49Z","closed_at":"2021-10-13T11:27:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2909","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2909","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2909.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2909.patch","merged_at":null},"body":"I can't run the tests for dummy data, facing this error \r\n\r\n`ImportError while loading conftest '\/home\/zaid\/tmp\/fix_anli_splits\/datasets\/tests\/conftest.py'.\r\ntests\/conftest.py:10: in \r\n from datasets import config\r\nE ImportError: cannot import name 'config' from 'datasets' (unknown location)`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2909\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2909\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2908","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2908\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2908\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2908\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2908","id":995970612,"node_id":"PR_kwDODunzps4rumwW","number":2908,"title":"Update Zenodo metadata with creator names and 
affiliation","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-14T12:39:37Z","updated_at":"2021-09-14T14:29:25Z","closed_at":"2021-09-14T14:29:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2908","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2908","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2908.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2908.patch","merged_at":"2021-09-14T14:29:25Z"},"body":"This PR helps in prefilling author data when automatically generating the DOI after each release.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2908\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2908\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2907","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2907\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2907\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2907\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2907","id":995968152,"node_id":"PR_kwDODunzps4rumOy","number":2907,"title":"add story_cloze 
dataset","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-14T12:36:53Z","updated_at":"2021-10-08T21:41:42Z","closed_at":"2021-10-08T21:41:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2907","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2907","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2907.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2907.patch","merged_at":null},"body":"@lhoestq I have spent some time but I still I can't succeed in correctly testing the dummy_data.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2907\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2907\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2906","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2906\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2906\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2906\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2906","id":995962905,"node_id":"PR_kwDODunzps4rulH-","number":2906,"title":"feat: \ud83c\udfb8 add a function to get a dataset config's split 
names","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-14T12:31:22Z","updated_at":"2021-10-04T09:55:38Z","closed_at":"2021-10-04T09:55:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2906","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2906","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2906.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2906.patch","merged_at":"2021-10-04T09:55:37Z"},"body":"Also: pass additional arguments (use_auth_token) to get private configs + info of private datasets on the hub\r\n\r\nQuestions:\r\n\r\n- [x] I'm not sure how the versions work: I changed 1.12.1.dev0 to 1.12.1.dev1, was it correct?\r\n -> no: reverted\r\n- [x] Should I add a section in https:\/\/github.com\/huggingface\/datasets\/blob\/master\/docs\/source\/load_hub.rst? 
(there is no section for get_dataset_infos)\r\n -> yes: added","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2906\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2906\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2905","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2905\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2905\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2905\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2905","id":995843964,"node_id":"PR_kwDODunzps4ruL5X","number":2905,"title":"Update BibTeX entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-14T10:16:17Z","updated_at":"2021-09-14T12:25:37Z","closed_at":"2021-09-14T12:25:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2905","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2905","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2905.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2905.patch","merged_at":"2021-09-14T12:25:37Z"},"body":"Update BibTeX entry.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2905\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2905\/timeline","performed_via_github_app":null} 
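A minimal usage sketch for the config/split/info helpers discussed in the records above (#2906 and #2910): listing a dataset's configs and split names and fetching its infos, optionally with an auth token for private Hub datasets. It assumes a `datasets` release that exposes `get_dataset_config_names`, `get_dataset_split_names`, and `get_dataset_infos` at the top level; the dataset name and token below are placeholders, not values taken from the issues.

```python
# Sketch of the config/split/info helpers referenced in #2906 / #2910.
# Assumes a datasets release that exports these at the top level; "glue" and
# the token shown in the comment are placeholders for illustration only.
from datasets import get_dataset_config_names, get_dataset_infos, get_dataset_split_names

configs = get_dataset_config_names("glue")        # e.g. ['cola', 'sst2', ...]
splits = get_dataset_split_names("glue", "sst2")  # e.g. ['train', 'validation', 'test']
infos = get_dataset_infos("glue")                 # {config_name: DatasetInfo, ...}

# For a private dataset on the Hub, the same calls are meant to accept a token, e.g.:
# get_dataset_split_names("my-org/private-dataset", use_auth_token="hf_xxx")
print(configs, splits, list(infos))
```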
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2904","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2904\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2904\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2904\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2904","id":995814222,"node_id":"I_kwDODunzps47WutO","number":2904,"title":"FORCE_REDOWNLOAD does not work","user":{"login":"anoopkatti","id":5278299,"node_id":"MDQ6VXNlcjUyNzgyOTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5278299?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anoopkatti","html_url":"https:\/\/github.com\/anoopkatti","followers_url":"https:\/\/api.github.com\/users\/anoopkatti\/followers","following_url":"https:\/\/api.github.com\/users\/anoopkatti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anoopkatti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anoopkatti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anoopkatti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anoopkatti\/orgs","repos_url":"https:\/\/api.github.com\/users\/anoopkatti\/repos","events_url":"https:\/\/api.github.com\/users\/anoopkatti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anoopkatti\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-09-14T09:45:26Z","updated_at":"2021-10-06T09:37:19Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWith GenerateMode.FORCE_REDOWNLOAD, the documentation says \r\n +------------------------------------+-----------+---------+\r\n | | Downloads | Dataset |\r\n +====================================+===========+=========+\r\n | `REUSE_DATASET_IF_EXISTS` (default)| Reuse | Reuse |\r\n +------------------------------------+-----------+---------+\r\n | `REUSE_CACHE_IF_EXISTS` | Reuse | Fresh |\r\n +------------------------------------+-----------+---------+\r\n | `FORCE_REDOWNLOAD` | Fresh | Fresh |\r\n +------------------------------------+-----------+---------+\r\n\r\nHowever, the old dataset is loaded even when FORCE_REDOWNLOAD is chosen.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n\r\nimport pandas as pd\r\nfrom datasets import load_dataset, GenerateMode\r\npd.DataFrame(range(5), columns=['numbers']).to_csv('\/tmp\/test.tsv.gz', index=False)\r\nee = load_dataset('csv', data_files=['\/tmp\/test.tsv.gz'], delimiter='\\t', split='train', download_mode=GenerateMode.FORCE_REDOWNLOAD)\r\nprint(ee)\r\npd.DataFrame(range(10), columns=['numerals']).to_csv('\/tmp\/test.tsv.gz', index=False)\r\nee = load_dataset('csv', data_files=['\/tmp\/test.tsv.gz'], delimiter='\\t', split='train', download_mode=GenerateMode.FORCE_REDOWNLOAD)\r\nprint(ee)\r\n\r\n```\r\n\r\n## Expected results\r\nDataset({\r\n features: ['numbers'],\r\n num_rows: 5\r\n})\r\nDataset({\r\n features: ['numerals'],\r\n num_rows: 
10\r\n})\r\n\r\n## Actual results\r\nDataset({\r\n features: ['numbers'],\r\n num_rows: 5\r\n})\r\nDataset({\r\n features: ['numbers'],\r\n num_rows: 5\r\n})\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-4.14.181-108.257.amzn1.x86_64-x86_64-with-glibc2.10\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2904\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2904\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2903","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2903\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2903\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2903\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2903","id":995715191,"node_id":"PR_kwDODunzps4rtxxV","number":2903,"title":"Fix xpathopen to accept positional arguments","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-14T08:02:50Z","updated_at":"2021-09-14T08:51:21Z","closed_at":"2021-09-14T08:40:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2903","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2903","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2903.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2903.patch","merged_at":"2021-09-14T08:40:47Z"},"body":"Fix `xpathopen()` so that it also accepts positional arguments.\r\n\r\nFix #2901.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2903\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2903\/timeline","performed_via_github_app":null} 
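A rough sketch of the kind of change #2903 describes: extending the patched `open` helper so it forwards positional arguments instead of accepting only keyword ones, which is what made `path.open("w")` fail under pytest in #2901. The `xopen` and `xpathopen` names mirror the identifiers quoted in the traceback, but the bodies below are an illustrative reconstruction, not the actual patch.

```python
# Illustrative reconstruction of the fix described in #2903 (not the actual diff).
# Before the fix, the patched Path.open accepted only keyword arguments, so a call
# like path.open("w") (as pytest's cacheprovider makes) raised
# "xpathopen() takes 1 positional argument but 2 were given".
from pathlib import Path


def xopen(file, mode="r", *args, **kwargs):
    # Stand-in for the streaming-aware open used by `datasets`; here it simply
    # delegates to the builtin so the sketch stays self-contained.
    return open(file, mode, *args, **kwargs)


def xpathopen(path: Path, *args, **kwargs):
    # Forward *args as well as **kwargs, so a positional call such as
    # xpathopen(path, "w") behaves like Path.open("w").
    return xopen(str(path), *args, **kwargs)


if __name__ == "__main__":
    with xpathopen(Path("demo.txt"), "w") as f:  # positional mode argument now accepted
        f.write("ok")
```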
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2902","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2902\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2902\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2902\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2902","id":995254216,"node_id":"MDU6SXNzdWU5OTUyNTQyMTY=","number":2902,"title":"Add WIT Dataset","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-09-13T19:38:49Z","updated_at":"2021-09-27T17:46:55Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *WIT*\r\n- **Description:** *Wikipedia-based Image Text Dataset*\r\n- **Paper:** *[WIT: Wikipedia-based Image Text Dataset for Multimodal Multilingual Machine Learning\r\n](https:\/\/arxiv.org\/abs\/2103.01913)*\r\n- **Data:** *https:\/\/github.com\/google-research-datasets\/wit*\r\n- **Motivation:** (excerpt from their Github README.md)\r\n\r\n> - The largest multimodal dataset (publicly available at the time of this writing) by the number of image-text examples.\r\n> - A massively multilingual dataset (first of its kind) with coverage for over 100+ languages.\r\n> - A collection of diverse set of concepts and real world entities.\r\n> - Brings forth challenging real-world test sets.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2902\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2902\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2901","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2901\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2901\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2901\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2901","id":995232844,"node_id":"MDU6SXNzdWU5OTUyMzI4NDQ=","number":2901,"title":"Incompatibility with pytest","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-13T19:12:17Z","updated_at":"2021-09-14T08:40:47Z","closed_at":"2021-09-14T08:40:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\npytest complains about xpathopen \/ path.open(\"w\")\r\n\r\n## Steps to reproduce the bug\r\n\r\nCreate a test file, `test.py`:\r\n\r\n```python\r\nimport 
datasets as ds\r\ndef load_dataset():\r\n ds.load_dataset(\"counter\", split=\"train\", streaming=True)\r\n```\r\n\r\nAnd launch it with pytest:\r\n\r\n```bash\r\npython -m pytest test.py\r\n```\r\n\r\n## Expected results\r\n\r\nIt should give something like:\r\n\r\n```\r\ncollected 1 item\r\n\r\ntest.py . [100%]\r\n\r\n======= 1 passed in 3.15s =======\r\n```\r\n\r\n## Actual results\r\n\r\n```\r\n============================================================================================================================= test session starts ==============================================================================================================================\r\nplatform linux -- Python 3.8.11, pytest-6.2.5, py-1.10.0, pluggy-1.0.0\r\nrootdir: \/home\/slesage\/hf\/datasets-preview-backend, configfile: pyproject.toml\r\nplugins: anyio-3.3.1\r\ncollected 1 item\r\n\r\ntests\/queries\/test_rows.py . [100%]Traceback (most recent call last):\r\n File \"\/home\/slesage\/.pyenv\/versions\/3.8.11\/lib\/python3.8\/runpy.py\", line 194, in _run_module_as_main\r\n return _run_code(code, main_globals, None,\r\n File \"\/home\/slesage\/.pyenv\/versions\/3.8.11\/lib\/python3.8\/runpy.py\", line 87, in _run_code\r\n exec(code, run_globals)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/pytest\/__main__.py\", line 5, in \r\n raise SystemExit(pytest.console_main())\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/_pytest\/config\/__init__.py\", line 185, in console_main\r\n code = main()\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/_pytest\/config\/__init__.py\", line 162, in main\r\n ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/pluggy\/_hooks.py\", line 265, in __call__\r\n return self._hookexec(self.name, self.get_hookimpls(), kwargs, firstresult)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/pluggy\/_manager.py\", line 80, in _hookexec\r\n return self._inner_hookexec(hook_name, methods, kwargs, firstresult)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/pluggy\/_callers.py\", line 60, in _multicall\r\n return outcome.get_result()\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/pluggy\/_result.py\", line 60, in get_result\r\n raise ex[1].with_traceback(ex[2])\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/pluggy\/_callers.py\", line 39, in _multicall\r\n res = hook_impl.function(*args)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/_pytest\/main.py\", line 316, in pytest_cmdline_main\r\n return wrap_session(config, _main)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/_pytest\/main.py\", line 304, in wrap_session\r\n config.hook.pytest_sessionfinish(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/pluggy\/_hooks.py\", line 265, in __call__\r\n return self._hookexec(self.name, self.get_hookimpls(), kwargs, firstresult)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/pluggy\/_manager.py\", line 80, in _hookexec\r\n return self._inner_hookexec(hook_name, methods, 
kwargs, firstresult)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/pluggy\/_callers.py\", line 55, in _multicall\r\n gen.send(outcome)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/_pytest\/terminal.py\", line 803, in pytest_sessionfinish\r\n outcome.get_result()\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/pluggy\/_result.py\", line 60, in get_result\r\n raise ex[1].with_traceback(ex[2])\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/pluggy\/_callers.py\", line 39, in _multicall\r\n res = hook_impl.function(*args)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/_pytest\/cacheprovider.py\", line 428, in pytest_sessionfinish\r\n config.cache.set(\"cache\/nodeids\", sorted(self.cached_nodeids))\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/_pytest\/cacheprovider.py\", line 188, in set\r\n f = path.open(\"w\")\r\nTypeError: xpathopen() takes 1 positional argument but 2 were given\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.12.0\r\n- Platform: Linux-5.11.0-1017-aws-x86_64-with-glibc2.29\r\n- Python version: 3.8.11\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2901\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2901\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2900","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2900\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2900\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2900\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2900","id":994922580,"node_id":"MDExOlB1bGxSZXF1ZXN0NzMyNzczNDkw","number":2900,"title":"Fix null sequence 
encoding","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-13T13:55:08Z","updated_at":"2021-09-13T14:17:43Z","closed_at":"2021-09-13T14:17:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2900","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2900","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2900.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2900.patch","merged_at":"2021-09-13T14:17:42Z"},"body":"The Sequence feature encoding was failing when a `None` sequence was used in a dataset.\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2892","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2900\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2900\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2899","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2899\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2899\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2899\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2899","id":994082432,"node_id":"MDU6SXNzdWU5OTQwODI0MzI=","number":2899,"title":"Dataset","user":{"login":"rcacho172","id":90449239,"node_id":"MDQ6VXNlcjkwNDQ5MjM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90449239?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rcacho172","html_url":"https:\/\/github.com\/rcacho172","followers_url":"https:\/\/api.github.com\/users\/rcacho172\/followers","following_url":"https:\/\/api.github.com\/users\/rcacho172\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rcacho172\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rcacho172\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rcacho172\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rcacho172\/orgs","repos_url":"https:\/\/api.github.com\/users\/rcacho172\/repos","events_url":"https:\/\/api.github.com\/users\/rcacho172\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rcacho172\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-12T07:38:53Z","updated_at":"2021-09-12T16:12:15Z","closed_at":"2021-09-12T16:12:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2899\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2899\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2898","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2898\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2898\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2898\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2898","id":994032814,"node_id":"MDU6SXNzdWU5OTQwMzI4MTQ=","number":2898,"title":"Hug 
emoji","user":{"login":"Jackg-08","id":90539794,"node_id":"MDQ6VXNlcjkwNTM5Nzk0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90539794?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Jackg-08","html_url":"https:\/\/github.com\/Jackg-08","followers_url":"https:\/\/api.github.com\/users\/Jackg-08\/followers","following_url":"https:\/\/api.github.com\/users\/Jackg-08\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Jackg-08\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Jackg-08\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Jackg-08\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Jackg-08\/orgs","repos_url":"https:\/\/api.github.com\/users\/Jackg-08\/repos","events_url":"https:\/\/api.github.com\/users\/Jackg-08\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Jackg-08\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-12T03:27:51Z","updated_at":"2021-09-12T16:13:13Z","closed_at":"2021-09-12T16:13:13Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2898\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2898\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2897","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2897\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2897\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2897\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2897","id":993798386,"node_id":"MDExOlB1bGxSZXF1ZXN0NzMxOTA0ODk4","number":2897,"title":"Add OpenAI's HumanEval 
dataset","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-11T09:37:47Z","updated_at":"2021-09-16T15:02:11Z","closed_at":"2021-09-16T15:02:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2897","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2897","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2897.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2897.patch","merged_at":"2021-09-16T15:02:11Z"},"body":"This PR adds OpenAI's [HumanEval](https:\/\/github.com\/openai\/human-eval) dataset. The dataset consists of 164 handcrafted programming problems with solutions and unittests to verify solution. 
This dataset is useful to evaluate code generation models.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2897\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2897\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2896","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2896\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2896\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2896\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2896","id":993613113,"node_id":"MDExOlB1bGxSZXF1ZXN0NzMxNzcwMTE3","number":2896,"title":"add multi-proc in `to_csv`","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-10T21:35:09Z","updated_at":"2021-10-28T05:47:33Z","closed_at":"2021-10-26T16:00:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2896","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2896","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2896.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2896.patch","merged_at":"2021-10-26T16:00:41Z"},"body":"This PR extends the multi-proc method used in #2747 for`to_json` to `to_csv` as well. \r\n\r\nResults on my machine post benchmarking on `ascent_kb` dataset (giving ~45% improvement when compared to num_proc = 1):\r\n```\r\nTime taken on 1 num_proc, 10000 batch_size 674.2055702209473\r\nTime taken on 4 num_proc, 10000 batch_size 425.6553490161896\r\n\r\nTime taken on 1 num_proc, 50000 batch_size 623.5897650718689\r\nTime taken on 4 num_proc, 50000 batch_size 380.0402421951294\r\n\r\nTime taken on 4 num_proc, 100000 batch_size 361.7168130874634\r\n```\r\nThis is a WIP as writing tests is pending for this PR. 
\r\n\r\nI'm also exploring [this](https:\/\/arrow.apache.org\/docs\/python\/csv.html#incremental-writing) approach for which I'm using `pyarrow-5.0.0`.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2896\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2896\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2895","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2895\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2895\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2895\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2895","id":993462274,"node_id":"MDExOlB1bGxSZXF1ZXN0NzMxNjQ0NTY2","number":2895,"title":"Use pyarrow.Table.replace_schema_metadata instead of pyarrow.Table.cast","user":{"login":"arsarabi","id":12345848,"node_id":"MDQ6VXNlcjEyMzQ1ODQ4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12345848?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arsarabi","html_url":"https:\/\/github.com\/arsarabi","followers_url":"https:\/\/api.github.com\/users\/arsarabi\/followers","following_url":"https:\/\/api.github.com\/users\/arsarabi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arsarabi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arsarabi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arsarabi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arsarabi\/orgs","repos_url":"https:\/\/api.github.com\/users\/arsarabi\/repos","events_url":"https:\/\/api.github.com\/users\/arsarabi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arsarabi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-10T17:56:57Z","updated_at":"2021-09-21T22:50:01Z","closed_at":"2021-09-21T08:18:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2895","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2895","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2895.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2895.patch","merged_at":"2021-09-21T08:18:35Z"},"body":"This PR partially addresses #2252.\r\n\r\n``update_metadata_with_features`` uses ``Table.cast`` which slows down ``load_from_disk`` (and possibly other methods that use it) for very large datasets. Since ``update_metadata_with_features`` is only updating the schema metadata, it makes more sense to use ``pyarrow.Table.replace_schema_metadata`` which is much faster. 
This PR adds a ``replace_schema_metadata`` method to all table classes, and modifies ``update_metadata_with_features`` to use it instead of ``cast``.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2895\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2895\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2894","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2894\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2894\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2894\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2894","id":993375654,"node_id":"MDExOlB1bGxSZXF1ZXN0NzMxNTcxODc5","number":2894,"title":"Fix COUNTER dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-10T16:07:29Z","updated_at":"2021-09-10T16:27:45Z","closed_at":"2021-09-10T16:27:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2894","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2894","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2894.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2894.patch","merged_at":"2021-09-10T16:27:44Z"},"body":"Fix filename generating `FileNotFoundError`.\r\n\r\nRelated to #2866.\r\nCC: @severo.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2894\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2894\/timeline","performed_via_github_app":null} 
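To illustrate the optimization described in PR 2895 above, a hedged pyarrow-only sketch; the metadata key and payload are placeholders, not the actual metadata written by `datasets`:

```python
# When only schema metadata changes, replace_schema_metadata swaps the metadata
# without touching column data, whereas Table.cast rebuilds the table against a
# new schema, which is much slower on very large tables.
import json
import pyarrow as pa

table = pa.table({"tokens": [["a", "b"], ["c"]]})
metadata = {b"huggingface": json.dumps({"info": "placeholder"}).encode("utf-8")}

slow = table.cast(table.schema.with_metadata(metadata))  # full cast path
fast = table.replace_schema_metadata(metadata)           # metadata-only path

print(slow.schema.metadata == fast.schema.metadata)      # both carry the new metadata
```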
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2893","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2893\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2893\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2893\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2893","id":993342781,"node_id":"MDExOlB1bGxSZXF1ZXN0NzMxNTQ0NDQz","number":2893,"title":"add mbpp dataset","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-10T15:27:30Z","updated_at":"2021-09-16T09:35:42Z","closed_at":"2021-09-16T09:35:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2893","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2893","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2893.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2893.patch","merged_at":"2021-09-16T09:35:42Z"},"body":"This PR adds the mbpp dataset introduced by Google [here](https:\/\/github.com\/google-research\/google-research\/tree\/master\/mbpp) as mentioned in #2816.\r\n\r\nThe dataset contain two versions: a full and a sanitized one. They have a slightly different schema and it is current state the loading preserves the original schema. An open question is whether to harmonize the two schemas when loading the dataset or to preserve the original one. 
Since not all fields are overlapping the schema will not be exactly the same.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2893\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2893\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2892","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2892\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2892\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2892\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2892","id":993274572,"node_id":"MDU6SXNzdWU5OTMyNzQ1NzI=","number":2892,"title":"Error when encoding a dataset with None objects with a Sequence feature","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-10T14:11:43Z","updated_at":"2021-09-13T14:18:13Z","closed_at":"2021-09-13T14:17:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"There is an error when encoding a dataset with None objects with a Sequence feature\r\n\r\nTo reproduce:\r\n```python\r\nfrom datasets import Dataset, Features, Value, Sequence\r\ndata = {\"a\": [[0], None]}\r\nfeatures = Features({\"a\": Sequence(Value(\"int32\"))})\r\ndataset = Dataset.from_dict(data, features=features)\r\n```\r\nraises\r\n\r\n```python\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n 2 data = {\"a\": [[0], None]}\r\n 3 features = Features({\"a\": Sequence(Value(\"int32\"))})\r\n----> 4 dataset = Dataset.from_dict(data, features=features)\r\n[...]\r\n~\/datasets\/features.py in encode_nested_example(schema, obj)\r\n 888 if isinstance(obj, str): # don't interpret a string as a list\r\n 889 raise ValueError(\"Got a string but expected a list instead: '{}'\".format(obj))\r\n--> 890 return [encode_nested_example(schema.feature, o) for o in obj]\r\n 891 # Object with special 
encoding:\r\n 892 # ClassLabel will convert from string to int, TranslationVariableLanguages does some checks\r\n\r\nTypeError: 'NoneType' object is not iterable\r\n```\r\n\r\nInstead, it should run without error, as if the `features` were not passed","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2892\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2892\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2891","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2891\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2891\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2891\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2891","id":993161984,"node_id":"MDExOlB1bGxSZXF1ZXN0NzMxMzkwNjM2","number":2891,"title":"Allow dynamic first dimension for ArrayXD","user":{"login":"rpowalski","id":10357417,"node_id":"MDQ6VXNlcjEwMzU3NDE3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10357417?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rpowalski","html_url":"https:\/\/github.com\/rpowalski","followers_url":"https:\/\/api.github.com\/users\/rpowalski\/followers","following_url":"https:\/\/api.github.com\/users\/rpowalski\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rpowalski\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rpowalski\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rpowalski\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rpowalski\/orgs","repos_url":"https:\/\/api.github.com\/users\/rpowalski\/repos","events_url":"https:\/\/api.github.com\/users\/rpowalski\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rpowalski\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-09-10T11:52:52Z","updated_at":"2021-11-23T15:33:13Z","closed_at":"2021-10-29T09:37:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2891","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2891","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2891.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2891.patch","merged_at":"2021-10-29T09:37:17Z"},"body":"Add support for dynamic first dimension for ArrayXD features. See issue [#887](https:\/\/github.com\/huggingface\/datasets\/issues\/887).\r\nThe following changes allow the `to_pylist` method of `ArrayExtensionArray` to return a list of numpy arrays where the first dimension can vary.\r\n\r\n@lhoestq Could you suggest how you want to extend the test suite? 
For now I added only very limited testing.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2891\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2891\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2890","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2890\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2890\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2890\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2890","id":993074102,"node_id":"MDU6SXNzdWU5OTMwNzQxMDI=","number":2890,"title":"0x290B112ED1280537B24Ee6C268a004994a16e6CE","user":{"login":"rcacho172","id":90449239,"node_id":"MDQ6VXNlcjkwNDQ5MjM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90449239?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rcacho172","html_url":"https:\/\/github.com\/rcacho172","followers_url":"https:\/\/api.github.com\/users\/rcacho172\/followers","following_url":"https:\/\/api.github.com\/users\/rcacho172\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rcacho172\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rcacho172\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rcacho172\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rcacho172\/orgs","repos_url":"https:\/\/api.github.com\/users\/rcacho172\/repos","events_url":"https:\/\/api.github.com\/users\/rcacho172\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rcacho172\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-10T09:51:17Z","updated_at":"2021-09-10T11:45:29Z","closed_at":"2021-09-10T11:45:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2890\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2890\/timeline","performed_via_github_app":null} 
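Relating to PR 2891 above (dynamic first dimension for ArrayXD), a small hedged example of what the feature enables; declaring the dynamic dimension as `None` in the shape is assumed from the PR description rather than confirmed here:

```python
# An Array2D feature whose first dimension varies per example.
from datasets import Array2D, Dataset, Features

features = Features({"boxes": Array2D(shape=(None, 2), dtype="float32")})
data = {"boxes": [[[0.0, 1.0]], [[0.0, 1.0], [2.0, 3.0]]]}
ds = Dataset.from_dict(data, features=features)
print([len(row) for row in ds["boxes"]])  # -> [1, 2]
```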
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2889","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2889\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2889\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2889\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2889","id":992968382,"node_id":"MDU6SXNzdWU5OTI5NjgzODI=","number":2889,"title":"Coc","user":{"login":"Bwiggity","id":90444264,"node_id":"MDQ6VXNlcjkwNDQ0MjY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90444264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Bwiggity","html_url":"https:\/\/github.com\/Bwiggity","followers_url":"https:\/\/api.github.com\/users\/Bwiggity\/followers","following_url":"https:\/\/api.github.com\/users\/Bwiggity\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Bwiggity\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Bwiggity\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Bwiggity\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Bwiggity\/orgs","repos_url":"https:\/\/api.github.com\/users\/Bwiggity\/repos","events_url":"https:\/\/api.github.com\/users\/Bwiggity\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Bwiggity\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-10T07:32:07Z","updated_at":"2021-09-10T11:45:54Z","closed_at":"2021-09-10T11:45:54Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2889\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2889\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2888","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2888\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2888\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2888\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2888","id":992676535,"node_id":"MDU6SXNzdWU5OTI2NzY1MzU=","number":2888,"title":"v1.11.1 release 
date","user":{"login":"fcakyon","id":34196005,"node_id":"MDQ6VXNlcjM0MTk2MDA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34196005?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/fcakyon","html_url":"https:\/\/github.com\/fcakyon","followers_url":"https:\/\/api.github.com\/users\/fcakyon\/followers","following_url":"https:\/\/api.github.com\/users\/fcakyon\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/fcakyon\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/fcakyon\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/fcakyon\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/fcakyon\/orgs","repos_url":"https:\/\/api.github.com\/users\/fcakyon\/repos","events_url":"https:\/\/api.github.com\/users\/fcakyon\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/fcakyon\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-09T21:53:15Z","updated_at":"2021-09-12T20:18:35Z","closed_at":"2021-09-12T16:15:39Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello, i need to use latest features in one of my packages but there have been no new datasets release since 2 months ago.\r\n\r\nWhen do you plan to publush v1.11.1 release?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2888\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2888\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2887","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2887\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2887\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2887\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2887","id":992576305,"node_id":"MDExOlB1bGxSZXF1ZXN0NzMwODg4MTU3","number":2887,"title":"#2837 Use cache folder for 
lockfile","user":{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-09T19:55:56Z","updated_at":"2021-10-05T17:58:22Z","closed_at":"2021-10-05T17:58:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2887","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2887","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2887.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2887.patch","merged_at":"2021-10-05T17:58:22Z"},"body":"Fixes #2837 \r\n\r\nUse a cache folder directory to store the FileLock.\r\n\r\nThe issue was that the lock file was in a readonly folder.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2887\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2887\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2886","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2886\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2886\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2886\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2886","id":992534632,"node_id":"MDU6SXNzdWU5OTI1MzQ2MzI=","number":2886,"title":"Hj","user":{"login":"Noorasri","id":90416328,"node_id":"MDQ6VXNlcjkwNDE2MzI4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/90416328?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Noorasri","html_url":"https:\/\/github.com\/Noorasri","followers_url":"https:\/\/api.github.com\/users\/Noorasri\/followers","following_url":"https:\/\/api.github.com\/users\/Noorasri\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Noorasri\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Noorasri\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Noorasri\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Noorasri\/orgs","repos_url":"https:\/\/api.github.com\/users\/Noorasri\/repos","events_url":"https:\/\/api.github.com\/users\/Noorasri\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Noorasri\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-09T18:58:52Z","updated_at":"2021-09-10T11:46:29Z","closed_at":"2021-09-10T11:46:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2886\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2886\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2885","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2885\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2885\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2885\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2885","id":992160544,"node_id":"MDU6SXNzdWU5OTIxNjA1NDQ=","number":2885,"title":"Adding an Elastic Search index to a 
Dataset","user":{"login":"MotzWanted","id":36195371,"node_id":"MDQ6VXNlcjM2MTk1Mzcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36195371?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MotzWanted","html_url":"https:\/\/github.com\/MotzWanted","followers_url":"https:\/\/api.github.com\/users\/MotzWanted\/followers","following_url":"https:\/\/api.github.com\/users\/MotzWanted\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MotzWanted\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MotzWanted\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MotzWanted\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MotzWanted\/orgs","repos_url":"https:\/\/api.github.com\/users\/MotzWanted\/repos","events_url":"https:\/\/api.github.com\/users\/MotzWanted\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MotzWanted\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-09-09T12:21:39Z","updated_at":"2021-10-20T18:57:11Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen trying to index documents from the squad dataset, the connection to ElasticSearch seems to break:\r\n\r\nReusing dataset squad (\/Users\/andreasmotz\/.cache\/huggingface\/datasets\/squad\/plain_text\/1.0.0\/d6ec3ceb99ca480ce37cdd35555d6cb2511d223b9150cce08a837ef62ffea453)\r\n 90%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 9501\/10570 [00:01<00:00, 6335.61docs\/s]\r\n\r\nNo error is thrown, but the indexing breaks ~90%.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nfrom datasets import load_dataset\r\nfrom elasticsearch import Elasticsearch\r\nes = Elasticsearch()\r\nsquad = load_dataset('squad', split='validation')\r\nindex_name = \"corpus\"\r\nes_config = {\r\n \"settings\": {\r\n \"number_of_shards\": 1,\r\n \"analysis\": {\"analyzer\": {\"stop_standard\": {\"type\": \"standard\", \" stopwords\": \"_english_\"}}},\r\n },\r\n \"mappings\": {\r\n \"properties\": {\r\n \"idx\" : {\"type\" : \"keyword\"},\r\n \"title\" : {\"type\" : \"keyword\"},\r\n \"text\": {\r\n \"type\": \"text\",\r\n \"analyzer\": \"standard\",\r\n \"similarity\": \"BM25\"\r\n },\r\n }\r\n },\r\n}\r\nclass IndexBuilder:\r\n \"\"\"\r\n Elastic search indexing of a corpus\r\n \"\"\"\r\n def __init__(\r\n self,\r\n *args,\r\n #corpus : None,\r\n dataset : squad,\r\n index_name = str,\r\n query = str,\r\n config = dict,\r\n **kwargs,\r\n ):\r\n #instantiate HuggingFace dataset\r\n self.dataset = dataset\r\n #instantiate ElasticSearch config\r\n self.config = config\r\n self.es = Elasticsearch()\r\n self.index_name = index_name\r\n self.query = query\r\n def elastic_index(self):\r\n print(self.es.info)\r\n self.es.indices.delete(index=self.index_name, ignore=[400, 404])\r\n search_index = self.dataset.add_elasticsearch_index(column='context', 
host='localhost', port='9200', es_index_name=self.index_name, es_index_config=self.config)\r\n return search_index\r\n def exact_match_method(self, index):\r\n scores, retrieved_examples = index.get_nearest_examples('context', query=self.query, k=1)\r\n return scores, retrieved_examples\r\nif __name__ == \"__main__\":\r\n print(type(squad))\r\n Index = IndexBuilder(dataset=squad, index_name='corpus_index', query='Where was Chopin born?', config=es_config)\r\n search_index = Index.elastic_index()\r\n scores, examples = Index.exact_match_method(search_index)\r\n print(scores, examples)\r\n for name in squad.column_names:\r\n print(type(squad[name]))\r\n```\r\n\r\n## Environment info\r\nWe run the code in Poetry. This might be the issue, since the script runs successfully in our local environment.\r\n\r\nPoetry:\r\n- Python version: 3.8\r\n- PyArrow: 4.0.1\r\n- Elasticsearch: 7.13.4\r\n- datasets: 1.10.2\r\n\r\nLocal:\r\n- Python version: 3.8\r\n- PyArrow: 3.0.0\r\n- Elasticsearch: 7.7.1\r\n- datasets: 1.7.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2885\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2885\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2884","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2884\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2884\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2884\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2884","id":992135698,"node_id":"MDExOlB1bGxSZXF1ZXN0NzMwNTA4MTE1","number":2884,"title":"Add IC, SI, ER tasks to 
SUPERB","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-09-09T11:56:03Z","updated_at":"2021-09-20T09:17:58Z","closed_at":"2021-09-20T09:00:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2884","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2884","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2884.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2884.patch","merged_at":"2021-09-20T09:00:49Z"},"body":"This PR adds 3 additional classification tasks to SUPERB\r\n\r\n#### Intent Classification\r\nDataset URL seems to be down at the moment :( See the note below.\r\nS3PRL source: https:\/\/github.com\/s3prl\/s3prl\/blob\/master\/s3prl\/downstream\/fluent_commands\/dataset.py\r\nInstructions: https:\/\/github.com\/s3prl\/s3prl\/tree\/master\/s3prl\/downstream#ic-intent-classification---fluent-speech-commands\r\n\r\n#### Speaker Identification\r\nManual download script:\r\n```\r\nmkdir VoxCeleb1\r\ncd VoxCeleb1\r\n \r\nwget https:\/\/thor.robots.ox.ac.uk\/~vgg\/data\/voxceleb\/vox1a\/vox1_dev_wav_partaa\r\nwget https:\/\/thor.robots.ox.ac.uk\/~vgg\/data\/voxceleb\/vox1a\/vox1_dev_wav_partab\r\nwget https:\/\/thor.robots.ox.ac.uk\/~vgg\/data\/voxceleb\/vox1a\/vox1_dev_wav_partac\r\nwget https:\/\/thor.robots.ox.ac.uk\/~vgg\/data\/voxceleb\/vox1a\/vox1_dev_wav_partad\r\ncat vox1_dev* > vox1_dev_wav.zip\r\nunzip vox1_dev_wav.zip\r\n \r\nwget https:\/\/thor.robots.ox.ac.uk\/~vgg\/data\/voxceleb\/vox1a\/vox1_test_wav.zip\r\nunzip vox1_test_wav.zip\r\n \r\n# download the official SUPERB train-dev-test split\r\nwget https:\/\/raw.githubusercontent.com\/s3prl\/s3prl\/master\/s3prl\/downstream\/voxceleb1\/veri_test_class.txt\r\n```\r\nS3PRL source: https:\/\/github.com\/s3prl\/s3prl\/blob\/master\/s3prl\/downstream\/voxceleb1\/dataset.py\r\nInstructions: https:\/\/github.com\/s3prl\/s3prl\/tree\/master\/s3prl\/downstream#sid-speaker-identification\r\n\r\n#### Intent Classification\r\nManual download requires going through a slow application process, see the note below.\r\nS3PRL source: https:\/\/github.com\/s3prl\/s3prl\/blob\/master\/s3prl\/downstream\/emotion\/IEMOCAP_preprocess.py\r\nInstructions: https:\/\/github.com\/s3prl\/s3prl\/tree\/master\/s3prl\/downstream#er-emotion-recognition\r\n\r\n#### :warning: Note\r\nThese datasets either require manual downloads or have broken\/unstable links. 
You can get all necessary archives in this repo: https:\/\/huggingface.co\/datasets\/anton-l\/superb_source_data_dumps\/tree\/main","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2884\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2884\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2883","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2883\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2883\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2883\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2883","id":991969875,"node_id":"MDExOlB1bGxSZXF1ZXN0NzMwMzYzNTQz","number":2883,"title":"Fix data URLs and metadata in DocRED dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-09T08:55:34Z","updated_at":"2021-09-13T11:24:31Z","closed_at":"2021-09-13T11:24:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2883","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2883","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2883.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2883.patch","merged_at":"2021-09-13T11:24:30Z"},"body":"The host of `docred` dataset has updated the `dev` data file. 
This PR:\r\n- Updates the dev URL\r\n- Updates dataset metadata\r\n\r\nThis PR also fixes the URL of the `train_distant` split, which was wrong.\r\n\r\nFix #2882.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2883\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2883\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2882","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2882\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2882\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2882\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2882","id":991800141,"node_id":"MDU6SXNzdWU5OTE4MDAxNDE=","number":2882,"title":"`load_dataset('docred')` results in a `NonMatchingChecksumError` ","user":{"login":"tmpr","id":51313597,"node_id":"MDQ6VXNlcjUxMzEzNTk3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/51313597?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tmpr","html_url":"https:\/\/github.com\/tmpr","followers_url":"https:\/\/api.github.com\/users\/tmpr\/followers","following_url":"https:\/\/api.github.com\/users\/tmpr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tmpr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tmpr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tmpr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tmpr\/orgs","repos_url":"https:\/\/api.github.com\/users\/tmpr\/repos","events_url":"https:\/\/api.github.com\/users\/tmpr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tmpr\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-09T05:55:02Z","updated_at":"2021-09-13T11:24:30Z","closed_at":"2021-09-13T11:24:30Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI get consistent `NonMatchingChecksumError: Checksums didn't match for dataset source files` errors when trying to execute `datasets.load_dataset('docred')`.\r\n\r\n## Steps to reproduce the bug\r\nIt is quasi only this code:\r\n```python\r\nimport datasets\r\ndata = datasets.load_dataset('docred')\r\n```\r\n\r\n## Expected results\r\nThe DocRED dataset should be loaded without any problems.\r\n\r\n## Actual results\r\n```\r\nNonMatchingChecksumError Traceback (most recent call last)\r\n in \r\n----> 1 d = datasets.load_dataset('docred')\r\n\r\n~\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, task, streaming, **config_kwargs)\r\n 845 \r\n 846 # Download and prepare data\r\n--> 847 builder_instance.download_and_prepare(\r\n 848 download_config=download_config,\r\n 849 download_mode=download_mode,\r\n\r\n~\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/builder.py in download_and_prepare(self, 
download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n 613 logger.warning(\"HF google storage unreachable. Downloading and preparing it from source\")\r\n 614 if not downloaded_from_gcs:\r\n--> 615 self._download_and_prepare(\r\n 616 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 617 )\r\n\r\n~\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 673 # Checksums verification\r\n 674 if verify_infos:\r\n--> 675 verify_checksums(\r\n 676 self.info.download_checksums, dl_manager.get_recorded_sizes_checksums(), \"dataset source files\"\r\n 677 )\r\n\r\n~\/anaconda3\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 38 if len(bad_urls) > 0:\r\n 39 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 40 raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 41 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 42 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/drive.google.com\/uc?export=download&id=1fDmfUUo5G7gfaoqWWvK81u08m71TK2g7']\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.11.0\r\n- Platform: Linux-5.11.0-7633-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.5\r\n- PyArrow version: 5.0.0\r\n\r\nThis error also happened on my Windows-partition, after freshly installing python 3.9 and `datasets`.\r\n\r\n## Remarks\r\n\r\n- I have already called `rm -rf \/home\/\/.cache\/huggingface`, i.e., I have tried clearing the cache.\r\n- The problem does not exist for other datasets, i.e., it seems to be DocRED-specific.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2882\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2882\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2881","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2881\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2881\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2881\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2881","id":991639142,"node_id":"MDExOlB1bGxSZXF1ZXN0NzMwMDc1OTAy","number":2881,"title":"Add BIOSSES 
dataset","user":{"login":"bwang482","id":6764450,"node_id":"MDQ6VXNlcjY3NjQ0NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6764450?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bwang482","html_url":"https:\/\/github.com\/bwang482","followers_url":"https:\/\/api.github.com\/users\/bwang482\/followers","following_url":"https:\/\/api.github.com\/users\/bwang482\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bwang482\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bwang482\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bwang482\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bwang482\/orgs","repos_url":"https:\/\/api.github.com\/users\/bwang482\/repos","events_url":"https:\/\/api.github.com\/users\/bwang482\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bwang482\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-09T00:35:36Z","updated_at":"2021-09-13T14:20:40Z","closed_at":"2021-09-13T14:20:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2881","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2881","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2881.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2881.patch","merged_at":"2021-09-13T14:20:40Z"},"body":"Adding the biomedical semantic sentence similarity dataset, BIOSSES, listed in \"Biomedical Datasets - BigScience Workshop 2021\"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2881\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2881\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2880","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2880\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2880\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2880\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2880","id":990877940,"node_id":"MDExOlB1bGxSZXF1ZXN0NzI5NDIzMDMy","number":2880,"title":"Extend support for streaming datasets that use pathlib.Path 
stem\/suffix","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-08T08:42:43Z","updated_at":"2021-09-09T13:13:29Z","closed_at":"2021-09-09T13:13:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2880","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2880","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2880.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2880.patch","merged_at":"2021-09-09T13:13:29Z"},"body":"This PR extends the support in streaming mode for datasets that use `pathlib`, by patching the properties `pathlib.Path.stem` and `pathlib.Path.suffix`.\r\n\r\nRelated to #2876, #2874, #2866.\r\n\r\nCC: @severo","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2880\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2880\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2879","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2879\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2879\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2879\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2879","id":990257404,"node_id":"MDU6SXNzdWU5OTAyNTc0MDQ=","number":2879,"title":"In v1.4.1, all TIMIT train transcripts are \"Would such an act of refusal be 
useful?\"","user":{"login":"rcgale","id":2279700,"node_id":"MDQ6VXNlcjIyNzk3MDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2279700?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rcgale","html_url":"https:\/\/github.com\/rcgale","followers_url":"https:\/\/api.github.com\/users\/rcgale\/followers","following_url":"https:\/\/api.github.com\/users\/rcgale\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rcgale\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rcgale\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rcgale\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rcgale\/orgs","repos_url":"https:\/\/api.github.com\/users\/rcgale\/repos","events_url":"https:\/\/api.github.com\/users\/rcgale\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rcgale\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-09-07T18:53:45Z","updated_at":"2021-09-08T16:55:19Z","closed_at":"2021-09-08T09:12:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nUsing version 1.4.1 of `datasets`, TIMIT transcripts are all the same.\r\n\r\n## Steps to reproduce the bug\r\nI was following this tutorial\r\n- https:\/\/huggingface.co\/blog\/fine-tune-wav2vec2-english\r\n\r\nBut here's a distilled repro:\r\n```python\r\n!pip install datasets==1.4.1\r\nfrom datasets import load_dataset\r\ntimit = load_dataset(\"timit_asr\", cache_dir=\".\/temp\")\r\nunique_transcripts = set(timit[\"train\"][\"text\"])\r\nprint(unique_transcripts)\r\nassert len(unique_transcripts) > 1\r\n```\r\n## Expected results\r\nExpected the correct TIMIT data. 
Or an error saying that this version of `datasets` can't produce it.\r\n\r\n## Actual results\r\nEvery train transcript was \"Would such an act of refusal be useful?\" Every test transcript was \"The bungalow was pleasantly situated near the shore.\"\r\n\r\n## Environment info\r\n- `datasets` version: 1.4.1\r\n- Platform: Darwin-18.7.0-x86_64-i386-64bit\r\n- Python version: 3.7.9\r\n- PyTorch version (GPU?): 1.9.0 (False)\r\n- Tensorflow version (GPU?): not installed (NA)\r\n- Using GPU in script?: tried both\r\n- Using distributed or parallel set-up in script?: no\r\n- \r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2879\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2879\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2878","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2878\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2878\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2878\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2878","id":990093316,"node_id":"MDU6SXNzdWU5OTAwOTMzMTY=","number":2878,"title":"NotADirectoryError: [WinError 267] During load_from_disk","user":{"login":"Grassycup","id":1875064,"node_id":"MDQ6VXNlcjE4NzUwNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1875064?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Grassycup","html_url":"https:\/\/github.com\/Grassycup","followers_url":"https:\/\/api.github.com\/users\/Grassycup\/followers","following_url":"https:\/\/api.github.com\/users\/Grassycup\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Grassycup\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Grassycup\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Grassycup\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Grassycup\/orgs","repos_url":"https:\/\/api.github.com\/users\/Grassycup\/repos","events_url":"https:\/\/api.github.com\/users\/Grassycup\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Grassycup\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-07T15:15:05Z","updated_at":"2021-09-07T15:15:05Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nTrying to load saved dataset or dataset directory from Amazon S3 on a Windows machine fails.\r\nPerforming the same operation succeeds on non-windows environment (AWS Sagemaker).\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Followed https:\/\/huggingface.co\/docs\/datasets\/filesystems.html#loading-a-processed-dataset-from-s3\r\n\r\nfrom datasets import load_from_disk\r\nfrom datasets.filesystems import S3FileSystem\r\n\r\n\r\ns3_file = \"output of 
save_to_disk\"\r\n\r\ns3_filesystem = S3FileSystem()\r\n\r\nload_from_disk(s3_file, fs=s3_filesystem)\r\n```\r\n\r\n## Expected results\r\nload_from_disk succeeds without error\r\n\r\n## Actual results\r\nSeems like it succeeds in pulling the file into a windows temp directory, as it exists in my system, but fails to process it.\r\n```\r\nException ignored in: \r\nTraceback (most recent call last):\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\weakref.py\", line 566, in __call__\r\n return info.func(*info.args, **(info.kwargs or {}))\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\tempfile.py\", line 817, in _cleanup\r\n cls._rmtree(name)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\tempfile.py\", line 813, in _rmtree\r\n _shutil.rmtree(name, onerror=onerror)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 740, in rmtree\r\n return _rmtree_unsafe(path, onerror)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 613, in _rmtree_unsafe\r\n _rmtree_unsafe(fullname, onerror)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 613, in _rmtree_unsafe\r\n _rmtree_unsafe(fullname, onerror)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 613, in _rmtree_unsafe\r\n _rmtree_unsafe(fullname, onerror)\r\n [Previous line repeated 2 more times]\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 618, in _rmtree_unsafe\r\n onerror(os.unlink, fullname, sys.exc_info())\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\tempfile.py\", line 805, in onerror\r\n cls._rmtree(path)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\tempfile.py\", line 813, in _rmtree\r\n _shutil.rmtree(name, onerror=onerror)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 740, in rmtree\r\n return _rmtree_unsafe(path, onerror)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 599, in _rmtree_unsafe\r\n onerror(os.scandir, path, sys.exc_info())\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 596, in _rmtree_unsafe\r\n with os.scandir(path) as scandir_it:\r\nNotADirectoryError: [WinError 267] The directory name is invalid: 'C:\\\\Users\\\\grassycup\\\\AppData\\\\Local\\\\Temp\\\\tmp45f_qbma\\\\tests3bucket\\\\output\\\\test_output\\\\train\\\\dataset.arrow'\r\nException ignored in: \r\nTraceback (most recent call last):\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\weakref.py\", line 566, in __call__\r\n return info.func(*info.args, **(info.kwargs or {}))\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\tempfile.py\", line 817, in _cleanup\r\n cls._rmtree(name)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\tempfile.py\", line 813, in _rmtree\r\n _shutil.rmtree(name, onerror=onerror)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 740, in rmtree\r\n return _rmtree_unsafe(path, onerror)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 613, in _rmtree_unsafe\r\n _rmtree_unsafe(fullname, onerror)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 613, in _rmtree_unsafe\r\n _rmtree_unsafe(fullname, onerror)\r\n File 
\"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 613, in _rmtree_unsafe\r\n _rmtree_unsafe(fullname, onerror)\r\n [Previous line repeated 2 more times]\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 618, in _rmtree_unsafe\r\n onerror(os.unlink, fullname, sys.exc_info())\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\tempfile.py\", line 805, in onerror\r\n cls._rmtree(path)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\tempfile.py\", line 813, in _rmtree\r\n _shutil.rmtree(name, onerror=onerror)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 740, in rmtree\r\n return _rmtree_unsafe(path, onerror)\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 599, in _rmtree_unsafe\r\n onerror(os.scandir, path, sys.exc_info())\r\n File \"C:\\Users\\grassycup\\Anaconda3\\envs\\hello.world\\lib\\shutil.py\", line 596, in _rmtree_unsafe\r\n with os.scandir(path) as scandir_it:\r\nNotADirectoryError: [WinError 267] The directory name is invalid:\r\n'C:\\\\Users\\\\grassycup\\\\AppData\\\\Local\\\\Temp\\\\tmp45f_qbma\\\\tests3bucket\\\\output\\\\test_output\\\\train\\\\dataset.arrow'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: Windows-10-10.0.19042-SP0\r\n- Python version: 3.8.11\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2878\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2878\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2877","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2877\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2877\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2877\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2877","id":990027249,"node_id":"MDU6SXNzdWU5OTAwMjcyNDk=","number":2877,"title":"Don't keep the dummy data folder or dataset_infos.json when resolving data 
files","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-07T14:09:04Z","updated_at":"2021-09-29T09:05:38Z","closed_at":"2021-09-29T09:05:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When there's no dataset script, all the data files of a folder or a repository on the Hub are loaded as data files.\r\n\r\nThere are already a few exceptions:\r\n- files starting with \".\" are ignored\r\n- the dataset card \"README.md\" is ignored\r\n- any file named \"config.json\" is ignored (currently it isn't used anywhere, but it could be used in the future to define splits or configs for example, but not 100% sure)\r\n\r\nHowever any data files in a folder named \"dummy\" should be ignored as well as they should only be used to test the dataset.\r\nSame for \"dataset_infos.json\" which should only be used to get the `dataset.info`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2877\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2877\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2876","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2876\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2876\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2876\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2876","id":990001079,"node_id":"MDExOlB1bGxSZXF1ZXN0NzI4NjU3MDc2","number":2876,"title":"Extend support for streaming datasets that use 
pathlib.Path.glob","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-07T13:43:45Z","updated_at":"2021-09-10T09:50:49Z","closed_at":"2021-09-10T09:50:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2876","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2876","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2876.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2876.patch","merged_at":"2021-09-10T09:50:48Z"},"body":"This PR extends the support in streaming mode for datasets that use `pathlib`, by patching the method `pathlib.Path.glob`.\r\n\r\nRelated to #2874, #2866.\r\n\r\nCC: @severo","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2876\/reactions","total_count":2,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2876\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2875","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2875\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2875\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2875\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2875","id":989919398,"node_id":"MDU6SXNzdWU5ODk5MTkzOTg=","number":2875,"title":"Add Congolese Swahili speech 
datasets","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-07T12:13:50Z","updated_at":"2021-09-07T12:13:50Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Congolese Swahili speech corpora\r\n- **Data:** https:\/\/gamayun.translatorswb.org\/data\/\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nAlso related: https:\/\/mobile.twitter.com\/OktemAlp\/status\/1435196393631764482","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2875\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2875\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2874","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2874\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2874\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2874\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2874","id":989685328,"node_id":"MDExOlB1bGxSZXF1ZXN0NzI4Mzg2Mjg4","number":2874,"title":"Support streaming datasets that use 
pathlib","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-09-07T07:35:49Z","updated_at":"2021-09-07T18:25:22Z","closed_at":"2021-09-07T11:41:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2874","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2874","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2874.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2874.patch","merged_at":"2021-09-07T11:41:15Z"},"body":"This PR extends the support in streaming mode for datasets that use `pathlib.Path`.\r\n\r\nRelated to: #2866.\r\nCC: @severo ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2874\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2874\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2873","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2873\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2873\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2873\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2873","id":989587695,"node_id":"MDExOlB1bGxSZXF1ZXN0NzI4MzA0MTMw","number":2873,"title":"adding 
swedish_medical_ner","user":{"login":"bwang482","id":6764450,"node_id":"MDQ6VXNlcjY3NjQ0NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6764450?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bwang482","html_url":"https:\/\/github.com\/bwang482","followers_url":"https:\/\/api.github.com\/users\/bwang482\/followers","following_url":"https:\/\/api.github.com\/users\/bwang482\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bwang482\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bwang482\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bwang482\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bwang482\/orgs","repos_url":"https:\/\/api.github.com\/users\/bwang482\/repos","events_url":"https:\/\/api.github.com\/users\/bwang482\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bwang482\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-09-07T04:44:53Z","updated_at":"2021-09-17T20:47:37Z","closed_at":"2021-09-17T20:47:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2873","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2873","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2873.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2873.patch","merged_at":null},"body":"Adding the Swedish Medical NER dataset, listed in \"Biomedical Datasets - BigScience Workshop 2021\"\r\n\r\nCode refactored ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2873\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2873\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2872","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2872\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2872\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2872\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2872","id":989453069,"node_id":"MDExOlB1bGxSZXF1ZXN0NzI4MTkzMjkz","number":2872,"title":"adding 
swedish_medical_ner","user":{"login":"bwang482","id":6764450,"node_id":"MDQ6VXNlcjY3NjQ0NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6764450?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bwang482","html_url":"https:\/\/github.com\/bwang482","followers_url":"https:\/\/api.github.com\/users\/bwang482\/followers","following_url":"https:\/\/api.github.com\/users\/bwang482\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bwang482\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bwang482\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bwang482\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bwang482\/orgs","repos_url":"https:\/\/api.github.com\/users\/bwang482\/repos","events_url":"https:\/\/api.github.com\/users\/bwang482\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bwang482\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-06T22:00:52Z","updated_at":"2021-09-07T04:36:32Z","closed_at":"2021-09-07T04:36:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2872","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2872","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2872.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2872.patch","merged_at":null},"body":"Adding the Swedish Medical NER dataset, listed in \"Biomedical Datasets - BigScience Workshop 2021\"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2872\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2872\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2871","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2871\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2871\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2871\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2871","id":989436088,"node_id":"MDU6SXNzdWU5ODk0MzYwODg=","number":2871,"title":"datasets.config.PYARROW_VERSION has no attribute 
'major'","user":{"login":"bwang482","id":6764450,"node_id":"MDQ6VXNlcjY3NjQ0NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6764450?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bwang482","html_url":"https:\/\/github.com\/bwang482","followers_url":"https:\/\/api.github.com\/users\/bwang482\/followers","following_url":"https:\/\/api.github.com\/users\/bwang482\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bwang482\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bwang482\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bwang482\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bwang482\/orgs","repos_url":"https:\/\/api.github.com\/users\/bwang482\/repos","events_url":"https:\/\/api.github.com\/users\/bwang482\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bwang482\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-09-06T21:06:57Z","updated_at":"2021-09-08T08:51:52Z","closed_at":"2021-09-08T08:51:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In the test_dataset_common.py script, line 288-289\r\n\r\n```\r\nif datasets.config.PYARROW_VERSION.major < 3:\r\n packaged_datasets = [pd for pd in packaged_datasets if pd[\"dataset_name\"] != \"parquet\"]\r\n```\r\n\r\nwhich throws the error below. `datasets.config.PYARROW_VERSION` itself return the string '4.0.1'. I have tested this on both datasets.__version_=='1.11.0' and '1.9.0'. 
I am using Mac OS.\r\n\r\n```\r\nimport datasets\r\ndatasets.config.PYARROW_VERSION.major\r\n---------------------------------------------------------------------------\r\nAttributeError Traceback (most recent call last)\r\n\/var\/folders\/1f\/0wqmlgp90qjd5mpj53fnjq440000gn\/T\/ipykernel_73361\/2547517336.py in \r\n 1 import datasets\r\n----> 2 datasets.config.PYARROW_VERSION.major\r\n\r\nAttributeError: 'str' object has no attribute 'major'\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.11.0\r\n- Platform: Darwin-20.6.0-x86_64-i386-64bit\r\n- Python version: 3.7.11\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2871\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2871\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2870","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2870\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2870\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2870\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2870","id":988276859,"node_id":"MDExOlB1bGxSZXF1ZXN0NzI3MjI4Njk5","number":2870,"title":"Fix three typos in two files for documentation","user":{"login":"leny-mi","id":25124853,"node_id":"MDQ6VXNlcjI1MTI0ODUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25124853?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/leny-mi","html_url":"https:\/\/github.com\/leny-mi","followers_url":"https:\/\/api.github.com\/users\/leny-mi\/followers","following_url":"https:\/\/api.github.com\/users\/leny-mi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/leny-mi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/leny-mi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/leny-mi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/leny-mi\/orgs","repos_url":"https:\/\/api.github.com\/users\/leny-mi\/repos","events_url":"https:\/\/api.github.com\/users\/leny-mi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/leny-mi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-04T11:49:43Z","updated_at":"2021-09-06T08:21:21Z","closed_at":"2021-09-06T08:19:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2870","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2870","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2870.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2870.patch","merged_at":"2021-09-06T08:19:35Z"},"body":"Changed \"bacth_size\" to \"batch_size\" (2x)\r\nChanged \"intsructions\" to 
\"instructions\"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2870\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2870\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2869","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2869\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2869\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2869\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2869","id":987676420,"node_id":"MDU6SXNzdWU5ODc2NzY0MjA=","number":2869,"title":"TypeError: 'NoneType' object is not callable","user":{"login":"Chenfei-Kang","id":40911446,"node_id":"MDQ6VXNlcjQwOTExNDQ2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40911446?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Chenfei-Kang","html_url":"https:\/\/github.com\/Chenfei-Kang","followers_url":"https:\/\/api.github.com\/users\/Chenfei-Kang\/followers","following_url":"https:\/\/api.github.com\/users\/Chenfei-Kang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Chenfei-Kang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Chenfei-Kang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Chenfei-Kang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Chenfei-Kang\/orgs","repos_url":"https:\/\/api.github.com\/users\/Chenfei-Kang\/repos","events_url":"https:\/\/api.github.com\/users\/Chenfei-Kang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Chenfei-Kang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-09-03T11:27:39Z","updated_at":"2021-09-08T12:09:58Z","closed_at":"2021-09-08T09:24:55Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nTypeError: 'NoneType' object is not callable\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset, load_metric\r\ndataset = datasets.load_dataset(\"glue\", 'cola')\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform:\r\n- Python version: 3.7\r\n- PyArrow version:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2869\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2869\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2868","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2868\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2868\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2868\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2868","id":987139146,"node_id":"MDU6SXNzdWU5ODcxMzkxNDY=","number":2868,"title":"Add Common Objects in 3D (CO3D)","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-02T20:36:12Z","updated_at":"2021-12-08T12:02:10Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *Common Objects in 3D (CO3D)*\r\n- **Description:** *See blog post [here](https:\/\/ai.facebook.com\/blog\/common-objects-in-3d-dataset-for-3d-reconstruction)*\r\n- **Paper:** *[link to paper](https:\/\/arxiv.org\/abs\/2109.00512)*\r\n- **Data:** *[link to data](https:\/\/ai.facebook.com\/datasets\/co3d-downloads\/)*\r\n- **Motivation:** *excerpt from above blog post:*\r\n\r\n> As the first data set of its kind, CO3D will aptly enable reconstruction of real-life 3D objects. Indeed, CO3D already provides training data to enable our NeRFormer to tackle the new-view synthesis (NVS) task. 
Here, photorealistic NVS is a major step on the path to fully immersive AR\/VR effects, where objects can be virtually transported across different environments, which will allow connecting users by sharing or recollecting their experiences.\r\n> \r\n> Besides practical applications in AR\/VR, we hope that the data set will become a standard testbed for the recent proliferation of methods (including NeRFormer, Implicit Differentiable Renderer, NeRF, and others) that reconstruct 3D scenes by means of an implicit shape model.\r\n> \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2868\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2868\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2867","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2867\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2867\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2867\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2867","id":986971224,"node_id":"MDExOlB1bGxSZXF1ZXN0NzI2MTE3NzAw","number":2867,"title":"Add CaSiNo dataset","user":{"login":"kushalchawla","id":8416863,"node_id":"MDQ6VXNlcjg0MTY4NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8416863?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kushalchawla","html_url":"https:\/\/github.com\/kushalchawla","followers_url":"https:\/\/api.github.com\/users\/kushalchawla\/followers","following_url":"https:\/\/api.github.com\/users\/kushalchawla\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kushalchawla\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kushalchawla\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kushalchawla\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kushalchawla\/orgs","repos_url":"https:\/\/api.github.com\/users\/kushalchawla\/repos","events_url":"https:\/\/api.github.com\/users\/kushalchawla\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kushalchawla\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-09-02T17:06:23Z","updated_at":"2021-09-16T15:12:54Z","closed_at":"2021-09-16T09:23:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2867","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2867","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2867.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2867.patch","merged_at":"2021-09-16T09:23:44Z"},"body":"Hi. I request you to add our dataset to the repository. 
\r\n\r\nThis data was recently published at NAACL 2021: https:\/\/aclanthology.org\/2021.naacl-main.254.pdf","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2867\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2867\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2866","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2866\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2866\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2866\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2866","id":986706676,"node_id":"MDU6SXNzdWU5ODY3MDY2NzY=","number":2866,"title":"\"counter\" dataset raises an error in normal mode, but not in streaming mode","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":11,"created_at":"2021-09-02T13:10:53Z","updated_at":"2021-10-14T09:24:09Z","closed_at":"2021-10-14T09:24:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\n`counter` dataset raises an error on `load_dataset()`, but simply returns an empty iterator in streaming mode.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\n>>> import datasets as ds\r\n>>> a = ds.load_dataset('counter', split=\"train\", streaming=False)\r\nUsing custom data configuration default\r\nDownloading and preparing dataset counter\/default (download: 1.29 MiB, generated: 2.48 MiB, post-processed: Unknown size, total: 3.77 MiB) to \/home\/slesage\/.cache\/huggingface\/datasets\/counter\/default\/1.0.0\/9f84962fa0f35bec5a34fe0bdff8681838d497008c457f7856c48654476ec0e9...\r\nTraceback (most recent call last):\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 726, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 1124, in _prepare_split\r\n for key, record in utils.tqdm(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/tqdm\/std.py\", line 1185, in __iter__\r\n for obj in iterable:\r\n File 
\"\/home\/slesage\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/counter\/9f84962fa0f35bec5a34fe0bdff8681838d497008c457f7856c48654476ec0e9\/counter.py\", line 161, in _generate_examples\r\n with derived_file.open(encoding=\"utf-8\") as f:\r\n File \"\/home\/slesage\/.pyenv\/versions\/3.8.11\/lib\/python3.8\/pathlib.py\", line 1222, in open\r\n return io.open(self, mode, buffering, encoding, errors, newline,\r\n File \"\/home\/slesage\/.pyenv\/versions\/3.8.11\/lib\/python3.8\/pathlib.py\", line 1078, in _opener\r\n return self._accessor.open(self, flags, mode)\r\nFileNotFoundError: [Errno 2] No such file or directory: '\/home\/slesage\/.cache\/huggingface\/datasets\/downloads\/extracted\/b57aa6db5601a738e57b95c1fd8cced54ff28fc540efcdaf0f6c4f1bb5dfe211\/COUNTER\/0032p.xml'\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 1112, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 636, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/slesage\/hf\/datasets-preview-backend\/.venv\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 728, in _download_and_prepare\r\n raise OSError(\r\nOSError: Cannot find data file.\r\nOriginal error:\r\n[Errno 2] No such file or directory: '\/home\/slesage\/.cache\/huggingface\/datasets\/downloads\/extracted\/b57aa6db5601a738e57b95c1fd8cced54ff28fc540efcdaf0f6c4f1bb5dfe211\/COUNTER\/0032p.xml'\r\n```\r\n\r\n```python\r\n>>> import datasets as ds\r\n>>> b = ds.load_dataset('counter', split=\"train\", streaming=True)\r\nUsing custom data configuration default\r\n>>> list(b)\r\n[]\r\n```\r\n\r\n## Expected results\r\n\r\nAn exception should be raised in streaming mode\r\n\r\n## Actual results\r\n\r\nNo exception is raised in streaming mode: there is no way to tell if something has broken or if the dataset is simply empty.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.1.dev0\r\n- Platform: Linux-5.11.0-1016-aws-x86_64-with-glibc2.29\r\n- Python version: 3.8.11\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2866\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2866\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2865","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2865\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2865\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2865\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2865","id":986460698,"node_id":"MDExOlB1bGxSZXF1ZXN0NzI1NjY1ODgx","number":2865,"title":"Add MultiEURLEX 
dataset","user":{"login":"iliaschalkidis","id":1626984,"node_id":"MDQ6VXNlcjE2MjY5ODQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1626984?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iliaschalkidis","html_url":"https:\/\/github.com\/iliaschalkidis","followers_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/followers","following_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/orgs","repos_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/repos","events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-09-02T09:42:24Z","updated_at":"2021-09-10T11:50:06Z","closed_at":"2021-09-10T11:50:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2865","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2865","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2865.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2865.patch","merged_at":"2021-09-10T11:50:06Z"},"body":"**Add new MultiEURLEX Dataset**\r\n\r\nMultiEURLEX comprises 65k EU laws in 23 official EU languages (some low-ish resource). Each EU law has been annotated with EUROVOC concepts (labels) by the Publication Office of EU. 
As with the English EURLEX, the goal is to predict the relevant EUROVOC concepts (labels); this is multi-label classification task (given the text, predict multiple labels).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2865\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2865\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2864","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2864\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2864\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2864\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2864","id":986159438,"node_id":"MDExOlB1bGxSZXF1ZXN0NzI1MzkyNjcw","number":2864,"title":"Fix data URL in ToTTo dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/8","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8\/labels","id":6968069,"node_id":"MI_kwDODunzps4AalMF","number":8,"title":"1.12","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":4,"closed_issues":2,"state":"open","created_at":"2021-07-21T15:34:56Z","updated_at":"2021-10-13T10:26:33Z","due_on":"2021-08-30T07:00:00Z","closed_at":null},"comments":0,"created_at":"2021-09-02T05:25:08Z","updated_at":"2021-09-02T06:47:40Z","closed_at":"2021-09-02T06:47:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2864","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2864","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2864.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2864.patch","merged_at":"2021-09-02T06:47:40Z"},"body":"Data source host changed their data URL: google-research-datasets\/ToTTo@cebeb43.\r\n\r\nFix #2860.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2864\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2864\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2863","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2863\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2863\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2863\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2863","id":986156755,"node_id":"MDExOlB1bGxSZXF1ZXN0NzI1MzkwMTkx","number":2863,"title":"Update dataset 
URL","user":{"login":"mrm8488","id":3653789,"node_id":"MDQ6VXNlcjM2NTM3ODk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3653789?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mrm8488","html_url":"https:\/\/github.com\/mrm8488","followers_url":"https:\/\/api.github.com\/users\/mrm8488\/followers","following_url":"https:\/\/api.github.com\/users\/mrm8488\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mrm8488\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mrm8488\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mrm8488\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mrm8488\/orgs","repos_url":"https:\/\/api.github.com\/users\/mrm8488\/repos","events_url":"https:\/\/api.github.com\/users\/mrm8488\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mrm8488\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-09-02T05:22:18Z","updated_at":"2021-09-02T08:10:50Z","closed_at":"2021-09-02T08:10:50Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2863","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2863","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2863.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2863.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2863\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2863\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2862","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2862\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2862\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2862\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2862","id":985763001,"node_id":"MDU6SXNzdWU5ODU3NjMwMDE=","number":2862,"title":"Only retain relevant statistics in certain 
metrics","user":{"login":"ZhaofengWu","id":11954789,"node_id":"MDQ6VXNlcjExOTU0Nzg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11954789?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZhaofengWu","html_url":"https:\/\/github.com\/ZhaofengWu","followers_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/followers","following_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/repos","events_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-09-01T22:18:10Z","updated_at":"2021-09-01T22:18:10Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nAs I understand, in the `add_batch()` function, the raw predictions and references are kept (in memory?) until `compute()` is called.\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/e248247518140d5b0527ce2843a1a327e2902059\/src\/datasets\/metric.py#L423-L442\r\n\r\nThis takes O(n) memory. However, for many (most?) metrics, this is not necessary. E.g., for accuracy, only the # correct and # total need to be recorded.\r\n\r\n**Describe the solution you'd like**\r\nProbably an inheritance hierarchy where `\"predictions\"` and `\"references\"` are not always the two keys for the final metric computation. 
Each metric should create and maintain its own relevant statistics, again for example, `\"n_correct\"` and `\"n_total\"` for accuracy.\r\n\r\nI believe the metrics in AllenNLP (https:\/\/github.com\/allenai\/allennlp\/tree\/39c40fe38cd2fd36b3465b0b3c031f54ec824160\/allennlp\/training\/metrics) can be used as a good reference.\r\n\r\n**Describe alternatives you've considered**\r\nAt least `Metric.compute()` shouldn't hard-code `\"predictions\"` and `\"references\"` so that custom subclasses may override this behavior.\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/e248247518140d5b0527ce2843a1a327e2902059\/src\/datasets\/metric.py#L399-L400","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2862\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2862\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2861","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2861\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2861\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2861\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2861","id":985081871,"node_id":"MDExOlB1bGxSZXF1ZXN0NzI0NDM2OTcw","number":2861,"title":"fix: \ud83d\udc1b be more specific when catching exceptions","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-09-01T12:18:12Z","updated_at":"2021-09-02T09:53:36Z","closed_at":"2021-09-02T09:52:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2861","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2861","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2861.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2861.patch","merged_at":null},"body":"The same specific exception is catched in other parts of the 
same\r\nfunction.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2861\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2861\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2860","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2860\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2860\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2860\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2860","id":985013339,"node_id":"MDU6SXNzdWU5ODUwMTMzMzk=","number":2860,"title":"Cannot download TOTTO dataset","user":{"login":"mrm8488","id":3653789,"node_id":"MDQ6VXNlcjM2NTM3ODk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3653789?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mrm8488","html_url":"https:\/\/github.com\/mrm8488","followers_url":"https:\/\/api.github.com\/users\/mrm8488\/followers","following_url":"https:\/\/api.github.com\/users\/mrm8488\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mrm8488\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mrm8488\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mrm8488\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mrm8488\/orgs","repos_url":"https:\/\/api.github.com\/users\/mrm8488\/repos","events_url":"https:\/\/api.github.com\/users\/mrm8488\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mrm8488\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-09-01T11:04:10Z","updated_at":"2021-09-02T06:47:40Z","closed_at":"2021-09-02T06:47:40Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Error: Couldn't find file at https:\/\/storage.googleapis.com\/totto\/totto_data.zip\r\n\r\n`datasets version: 1.11.0`\r\n# How to reproduce:\r\n\r\n```py\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('totto')\r\n```\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2860\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2860\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2859","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2859\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2859\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2859\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2859","id":984324500,"node_id":"MDU6SXNzdWU5ODQzMjQ1MDA=","number":2859,"title":"Loading allenai\/c4 in streaming mode does too many HEAD requests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"},{"id":3287858981,"node_id":"MDU6TGFiZWwzMjg3ODU4OTgx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/streaming","name":"streaming","color":"fef2c0","default":false,"description":""}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-08-31T21:11:04Z","updated_at":"2021-10-12T07:35:52Z","closed_at":"2021-10-11T11:05:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"This does 60,000+ HEAD requests to get all the ETags of all the data files:\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset(\"allenai\/c4\", streaming=True)\r\n```\r\nIt makes loading the dataset completely impractical.\r\n\r\nThe ETags are used to compute the config id (it must depend on the data files being used).\r\nInstead of using the ETags, we could simply use the commit hash of the dataset repository on the hub, as well and the glob pattern used to resolve the files (here it's `*` by default, to load all the files of the repository)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2859\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2859\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2858","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2858\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2858\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2858\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2858","id":984145568,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIzNjEzNzQ0","number":2858,"title":"Fix s3fs version in CI","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-31T18:05:43Z","updated_at":"2021-09-06T13:33:35Z","closed_at":"2021-08-31T21:29:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2858","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2858","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2858.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2858.patch","merged_at":"2021-08-31T21:29:51Z"},"body":"The latest s3fs version has new constrains on aiobotocore, and therefore on boto3 and botocore\r\n\r\nThis PR changes the constrains to avoid the new conflicts\r\n\r\nIn particular it pins the version of s3fs.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2858\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2858\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2857","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2857\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2857\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2857\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2857","id":984093938,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIzNTY5OTE4","number":2857,"title":"Update: Openwebtext - update 
size","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-31T17:11:03Z","updated_at":"2021-09-07T09:44:32Z","closed_at":"2021-09-07T09:44:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2857","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2857","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2857.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2857.patch","merged_at":"2021-09-07T09:44:32Z"},"body":"Update the size of the Openwebtext dataset\r\n\r\nI also regenerated the dataset_infos.json but the data file checksum didn't change, and the number of examples either (8013769 examples)\r\n\r\nrelated to #2839 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2857\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2857\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2856","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2856\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2856\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2856\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2856","id":983876734,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIzMzg2NzIw","number":2856,"title":"fix: \ud83d\udc1b remove URL's query string only if it's 
?dl=1","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-31T13:40:07Z","updated_at":"2021-08-31T14:22:12Z","closed_at":"2021-08-31T14:22:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2856","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2856","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2856.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2856.patch","merged_at":"2021-08-31T14:22:12Z"},"body":"A lot of URL use the query strings, for example\r\nhttp:\/\/opus.nlpl.eu\/download.php?f=Bianet\/v1\/moses\/en-ku.txt.zip, we\r\nmust not remove it when trying to detect the protocol. We thus remove it\r\nonly in the case of the query string being ?dl=1 which occurs on dropbox\r\nand dl.orangedox.com. 
Also: add unit tests.\r\n\r\nSee https:\/\/github.com\/huggingface\/datasets\/pull\/2843 for the original\r\ndiscussion.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2856\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2856\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2855","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2855\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2855\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2855\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2855","id":983858229,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIzMzcxMTIy","number":2855,"title":"Fix windows CI CondaError","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-31T13:22:02Z","updated_at":"2021-08-31T13:35:34Z","closed_at":"2021-08-31T13:35:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2855","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2855","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2855.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2855.patch","merged_at":"2021-08-31T13:35:33Z"},"body":"From this thread: https:\/\/github.com\/conda\/conda\/issues\/6057\r\n\r\nWe can fix the conda error\r\n```\r\nCondaError: Cannot link a source that does not exist.\r\nC:\\Users\\...\\Anaconda3\\Scripts\\conda.exe\r\n```\r\n\r\nby doing\r\n```bash\r\nconda update conda\r\n```\r\n\r\nbefore doing any install in the windows CI","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2855\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2855\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2854","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2854\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2854\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2854\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2854","id":983726084,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIzMjU3NDg5","number":2854,"title":"Fix caching when moving script","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-31T10:58:35Z","updated_at":"2021-08-31T13:13:36Z","closed_at":"2021-08-31T13:13:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2854","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2854","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2854.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2854.patch","merged_at":"2021-08-31T13:13:36Z"},"body":"When caching the result of a `map` function, the hash that is computed depends on many properties of this function, such as all the python objects it uses, its code and also the location of this code.\r\n\r\nUsing the full path of the python script for the location of the code makes the hash change if a script like `run_mlm.py` is moved.\r\n\r\nI changed this by simply using the base name of the script instead of the full path.\r\n\r\nNote that this change also affects the hash of the code used from imported modules, but I think it's fine. 
Indeed it hashes the code of the imported modules anyway, so the location of the python files of the imported modules doesn't matter when computing the hash.\r\n\r\nClose https:\/\/github.com\/huggingface\/datasets\/issues\/2825","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2854\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2854\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2853","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2853\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2853\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2853\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2853","id":983692026,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIzMjI4NDY3","number":2853,"title":"Add AMI dataset","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-31T10:19:01Z","updated_at":"2021-09-29T09:19:19Z","closed_at":"2021-09-29T09:19:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2853","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2853","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2853.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2853.patch","merged_at":"2021-09-29T09:19:18Z"},"body":"This is an initial commit for AMI dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2853\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2853\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2852","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2852\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2852\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2852\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2852","id":983609352,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIzMTU4Mzc4","number":2852,"title":"Fix: linnaeus - fix url","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-31T08:51:13Z","updated_at":"2021-08-31T13:12:10Z","closed_at":"2021-08-31T13:12:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2852","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2852","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2852.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2852.patch","merged_at":"2021-08-31T13:12:09Z"},"body":"The url was causing a `ConnectionError` because of the \"\/\" at the end\r\n\r\nClose https:\/\/github.com\/huggingface\/datasets\/issues\/2821","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2852\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2852\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2851","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2851\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2851\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2851\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2851","id":982789593,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIyNDg4MDY2","number":2851,"title":"Update `column_names` showed as `:func:` in 
exploring.st","user":{"login":"ClementRomac","id":8899812,"node_id":"MDQ6VXNlcjg4OTk4MTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8899812?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ClementRomac","html_url":"https:\/\/github.com\/ClementRomac","followers_url":"https:\/\/api.github.com\/users\/ClementRomac\/followers","following_url":"https:\/\/api.github.com\/users\/ClementRomac\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ClementRomac\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ClementRomac\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ClementRomac\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ClementRomac\/orgs","repos_url":"https:\/\/api.github.com\/users\/ClementRomac\/repos","events_url":"https:\/\/api.github.com\/users\/ClementRomac\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ClementRomac\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-30T13:21:46Z","updated_at":"2021-09-01T08:42:11Z","closed_at":"2021-08-31T14:45:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2851","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2851","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2851.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2851.patch","merged_at":"2021-08-31T14:45:46Z"},"body":"Hi, \r\n\r\nOne mention of `column_names` in exploring.st was showing it as `:func:` instead of `:attr:`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2851\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2851\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2850","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2850\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2850\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2850\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2850","id":982654644,"node_id":"MDU6SXNzdWU5ODI2NTQ2NDQ=","number":2850,"title":"Wound segmentation 
datasets","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-30T10:44:32Z","updated_at":"2021-12-08T12:02:00Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Wound segmentation datasets\r\n- **Description:** annotated wound image dataset \r\n- **Paper:** https:\/\/www.nature.com\/articles\/s41598-020-78799-w\r\n- **Data:** https:\/\/github.com\/uwm-bigdata\/wound-segmentation\r\n- **Motivation:** Interesting simple image dataset, useful for segmentation, with visibility due to http:\/\/www.miccai.org\/special-interest-groups\/challenges\/ and https:\/\/fusc.grand-challenge.org\/\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2850\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2850\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2849","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2849\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2849\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2849\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2849","id":982631420,"node_id":"MDU6SXNzdWU5ODI2MzE0MjA=","number":2849,"title":"Add Open Catalyst Project 
Dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-30T10:14:39Z","updated_at":"2021-08-30T10:14:39Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Open Catalyst 2020 (OC20) Dataset\r\n- **Website:** https:\/\/opencatalystproject.org\/\r\n- **Data:** https:\/\/github.com\/Open-Catalyst-Project\/ocp\/blob\/master\/DATASET.md\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2849\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2849\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2848","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2848\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2848\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2848\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2848","id":981953908,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIxODYyMDQx","number":2848,"title":"Update 
README.md","user":{"login":"odellus","id":4686956,"node_id":"MDQ6VXNlcjQ2ODY5NTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4686956?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/odellus","html_url":"https:\/\/github.com\/odellus","followers_url":"https:\/\/api.github.com\/users\/odellus\/followers","following_url":"https:\/\/api.github.com\/users\/odellus\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/odellus\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/odellus\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/odellus\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/odellus\/orgs","repos_url":"https:\/\/api.github.com\/users\/odellus\/repos","events_url":"https:\/\/api.github.com\/users\/odellus\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/odellus\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-28T23:58:26Z","updated_at":"2021-09-07T09:40:32Z","closed_at":"2021-09-07T09:40:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2848","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2848","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2848.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2848.patch","merged_at":"2021-09-07T09:40:32Z"},"body":"Changed 'Tain' to 'Train'.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2848\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2848\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2847","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2847\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2847\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2847\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2847","id":981589693,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIxNjA3OTA0","number":2847,"title":"fix regex to accept negative 
timezone","user":{"login":"jadermcs","id":7156771,"node_id":"MDQ6VXNlcjcxNTY3NzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7156771?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jadermcs","html_url":"https:\/\/github.com\/jadermcs","followers_url":"https:\/\/api.github.com\/users\/jadermcs\/followers","following_url":"https:\/\/api.github.com\/users\/jadermcs\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jadermcs\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jadermcs\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jadermcs\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jadermcs\/orgs","repos_url":"https:\/\/api.github.com\/users\/jadermcs\/repos","events_url":"https:\/\/api.github.com\/users\/jadermcs\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jadermcs\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-27T20:54:05Z","updated_at":"2021-09-13T20:39:50Z","closed_at":"2021-09-07T09:34:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2847","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2847","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2847.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2847.patch","merged_at":"2021-09-07T09:34:23Z"},"body":"fix #2846","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2847\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2847\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2846","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2846\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2846\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2846\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2846","id":981587590,"node_id":"MDU6SXNzdWU5ODE1ODc1OTA=","number":2846,"title":"Negative 
timezone","user":{"login":"jadermcs","id":7156771,"node_id":"MDQ6VXNlcjcxNTY3NzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7156771?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jadermcs","html_url":"https:\/\/github.com\/jadermcs","followers_url":"https:\/\/api.github.com\/users\/jadermcs\/followers","following_url":"https:\/\/api.github.com\/users\/jadermcs\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jadermcs\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jadermcs\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jadermcs\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jadermcs\/orgs","repos_url":"https:\/\/api.github.com\/users\/jadermcs\/repos","events_url":"https:\/\/api.github.com\/users\/jadermcs\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jadermcs\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-27T20:50:33Z","updated_at":"2021-09-10T11:51:07Z","closed_at":"2021-09-10T11:51:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe load_dataset method do not accept a parquet file with a negative timezone, as it has the following regex:\r\n```\r\n\"^(s|ms|us|ns),\\s*tz=([a-zA-Z0-9\/_+:]*)$\"\r\n```\r\nSo a valid timestap ```timestamp[us, tz=-03:00]``` returns an error when loading parquet files.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Where the timestamp column has a tz of -03:00\r\ndatasets = load_dataset('parquet', data_files={'train': train_files, 'validation': validation_files,\r\n 'test': test_files}, cache_dir=\".\/cache_teste\/\")\r\n```\r\n\r\n## Expected results\r\nThe -03:00 is a valid tz so the regex should accept this without raising an error.\r\n\r\n## Actual results\r\nAs this regex disaproves a valid tz it raises the following error:\r\n```python\r\nraise ValueError(\r\n f\"{datasets_dtype} is not a validly formatted string representation of a pyarrow timestamp.\"\r\n f\"Examples include timestamp[us] or timestamp[us, tz=America\/New_York]\"\r\n f\"See: https:\/\/arrow.apache.org\/docs\/python\/generated\/pyarrow.timestamp.html#pyarrow.timestamp\"\r\n )\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: Ubuntu 20.04\r\n- Python version: 3.8\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2846\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2846\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2845","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2845\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2845\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2845\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2845","id":981487861,"node_id":"MDU6SXNzdWU5ODE0ODc4NjE=","number":2845,"title":"[feature request] adding easy to remember `datasets.cache_dataset()` + `datasets.is_dataset_cached()`","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-27T18:21:51Z","updated_at":"2021-08-27T18:24:05Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Often, there is a need to prepare a dataset but not use it immediately, e.g. think tests suite setup, so it'd be really useful to be able to do:\r\n\r\n``` \r\nif not datasets.is_dataset_cached(ds): datasets.cache_dataset(ds)\r\n```\r\n\r\nThis can already be done with:\r\n```\r\nbuilder = load_dataset_builder(ds)\r\nif not os.path.idsir(builder.cache_dir):\r\n builder.download_and_prepare()\r\n```\r\n\r\nbut the current way is a way less intuitive and much harder to remember than the proposed API, IMHO. 
\r\n\r\nOne more way is to do:\r\n\r\n```\r\n_ = load_dataset(ds)\r\n```\r\nbut it wastes resources loading the dataset when it's not needed.\r\n\r\nthis has been discussed at https:\/\/huggingface.slack.com\/archives\/C01229B19EX\/p1630021912025800\r\n\r\nThank you!\r\n\r\n@lhoestq \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2845\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2845\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2844","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2844\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2844\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2844\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2844","id":981382806,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIxNDQzMjY2","number":2844,"title":"Fix: wikicorpus - fix keys","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-27T15:56:06Z","updated_at":"2021-09-06T14:07:28Z","closed_at":"2021-09-06T14:07:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2844","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2844","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2844.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2844.patch","merged_at":"2021-09-06T14:07:27Z"},"body":"As mentioned in https:\/\/github.com\/huggingface\/datasets\/issues\/2552, there is a duplicate keys error in `wikicorpus`.\r\n\r\nI fixed that by taking into account the file index in the keys","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2844\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2844\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2843","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2843\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2843\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2843\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2843","id":981317775,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIxMzkwODA5","number":2843,"title":"Fix extraction protocol inference from urls with params","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-08-27T14:40:57Z","updated_at":"2021-08-30T17:11:49Z","closed_at":"2021-08-30T13:12:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2843","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2843","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2843.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2843.patch","merged_at":"2021-08-30T13:12:01Z"},"body":"Previously it was unable to infer the compression protocol for files at URLs like\r\n```\r\nhttps:\/\/foo.bar\/train.json.gz?dl=1\r\n```\r\nbecause of the query parameters.\r\n\r\nI fixed that, this should allow 10+ datasets to work in streaming mode:\r\n```\r\n \"discovery\",\r\n \"emotion\",\r\n \"grail_qa\",\r\n \"guardian_authorship\",\r\n \"pragmeval\",\r\n \"simple_questions_v2\",\r\n \"versae\/adobo\",\r\n \"w-nicole\/childes_data\",\r\n \"w-nicole\/childes_data_no_tags_\",\r\n \"w-nicole\/childes_data_with_tags\",\r\n \"w-nicole\/childes_data_with_tags_\"\r\n```\r\n\r\ncc @severo ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2843\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2843\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2842","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2842\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2842\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2842\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2842","id":980725899,"node_id":"MDU6SXNzdWU5ODA3MjU4OTk=","number":2842,"title":"always requiring the username in the dataset name when there is one","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-08-26T23:31:53Z","updated_at":"2021-10-22T09:43:35Z","closed_at":"2021-10-22T09:43:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Me and now another person have been bitten by the `datasets`'s non-strictness on requiring a dataset creator's username when it's due.\r\n\r\nSo both of us started with `stas\/openwebtext-10k`, somewhere along the lines lost `stas\/` and continued using `openwebtext-10k` and it all was good until we published the software and things broke, since there is no `openwebtext-10k`\r\n\r\nSo this feature request is asking to tighten the checking and not allow dataset loading if it was downloaded with the user prefix, but then attempted to be used w\/o it.\r\n\r\nThe same in code:\r\n\r\n```\r\n# first run\r\npython -c \"from datasets import load_dataset; load_dataset('stas\/openwebtext-10k')\"\r\n# now run immediately\r\npython -c \"from datasets import load_dataset; load_dataset('openwebtext-10k')\"\r\n# the second command should fail, but it doesn't fail now.\r\n```\r\n\r\nPlease let me know if I explained myself clearly.\r\n\r\nThank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2842\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2842\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2841","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2841\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2841\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2841\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2841","id":980497321,"node_id":"MDU6SXNzdWU5ODA0OTczMjE=","number":2841,"title":"Adding GLUECoS Hinglish and Spanglish code-switching bemchmark","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-26T17:47:39Z","updated_at":"2021-10-20T18:41:20Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** GLUECoS\r\n- **Description:** a Microsoft Benchmark to evaluate code-switching for only two language pairs but a variety of tasks\r\n- **Paper:** https:\/\/aclanthology.org\/2020.acl-main.329\/\r\n- **Data:** https:\/\/github.com\/microsoft\/GLUECoS\r\n- **Motivation:** We currently only have [one other](https:\/\/huggingface.co\/datasets\/lince) dataset for code-switching\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2841\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2841\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2840","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2840\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2840\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2840\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2840","id":980489074,"node_id":"MDU6SXNzdWU5ODA0ODkwNzQ=","number":2840,"title":"How can I compute BLEU-4 score use `load_metric` ?","user":{"login":"Doragd","id":26213546,"node_id":"MDQ6VXNlcjI2MjEzNTQ2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26213546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Doragd","html_url":"https:\/\/github.com\/Doragd","followers_url":"https:\/\/api.github.com\/users\/Doragd\/followers","following_url":"https:\/\/api.github.com\/users\/Doragd\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Doragd\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Doragd\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Doragd\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Doragd\/orgs","repos_url":"https:\/\/api.github.com\/users\/Doragd\/repos","events_url":"https:\/\/api.github.com\/users\/Doragd\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Doragd\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-26T17:36:37Z","updated_at":"2021-08-27T08:13:24Z","closed_at":"2021-08-27T08:13:24Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I have found the sacrebleu metric. 
But, I do not know the difference between it and BLEU-4.\r\nIf I want to compute BLEU-4 score, what can i do?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2840\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2840\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2839","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2839\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2839\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2839\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2839","id":980271715,"node_id":"MDU6SXNzdWU5ODAyNzE3MTU=","number":2839,"title":"OpenWebText: NonMatchingSplitsSizesError","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-08-26T13:50:26Z","updated_at":"2021-09-21T14:12:40Z","closed_at":"2021-09-21T14:09:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nWhen downloading `openwebtext`, I'm getting:\r\n```\r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=39769494896, num_examples=8013769, dataset_name='openwebtext'), 'recorded': SplitInfo(name='train', num_bytes=39611023912, num_examples=7982430, dataset_name='openwebtext')}]\r\n```\r\n\r\nI suspect that the file we download from has changed since the size doesn't look like to match with documentation\r\n\r\n`Downloading: 0%| | 0.00\/12.9G [00:00\r\n- `datasets` version: 1.10.2\r\n- Platform: linux (Redhat version 8.1)\r\n- Python version: 3.8\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2839\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2839\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2838","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2838\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2838\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2838\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2838","id":980067186,"node_id":"MDExOlB1bGxSZXF1ZXN0NzIwMzcxMDk5","number":2838,"title":"Add error_bad_chunk to the JSON loader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-26T10:07:32Z","updated_at":"2021-08-26T10:08:06Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2838","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2838","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2838.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2838.patch","merged_at":null},"body":"Add the `error_bad_chunk` parameter to the JSON loader.\r\n\r\nSetting `error_bad_chunk=False` allows to skip an unparsable chunk of JSON data without raising an error.\r\n\r\nAdditional note:\r\n\r\nIn case of an unparsable JSON chunk, the JSON loader no longer tries to load the full JSON (which could take a lot of time in streaming mode) to get the JSON fields that the user may have forgotten to pass. 
Ex : for squad-like data, the user has to pass `field=\"data\"` to tell the loader to get the list of examples from this field.\r\n\r\nTODO: update docs\r\n\r\ncc @lvwerra ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2838\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2838\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2837","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2837\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2837\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2837\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2837","id":979298297,"node_id":"MDU6SXNzdWU5NzkyOTgyOTc=","number":2837,"title":"prepare_module issue when loading from read-only fs","user":{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-25T15:21:26Z","updated_at":"2021-10-05T17:58:22Z","closed_at":"2021-10-05T17:58:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nWhen we use prepare_module from a readonly file system, we create a FileLock using the `local_path`.\r\nThis path is not necessarily writable.\r\n\r\n`lock_path = local_path + \".lock\"`\r\n\r\n\r\n## Steps to reproduce the bug\r\n\r\nRun `load_dataset` on a readonly python loader file.\r\n```python\r\nds = load_dataset(\r\n python_loader, data_files={\"train\": train_path, \"test\": test_path}\r\n )\r\n```\r\n\r\nwhere `python_loader` is a path to a file located in a readonly folder.\r\n\r\n## Expected results\r\nThis should work I think?\r\n\r\n## Actual results\r\n\r\n```python\r\n return load_dataset(\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/load.py\", line 711, in load_dataset\r\n module_path, hash, resolved_file_path = prepare_module(\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/load.py\", line 465, in prepare_module\r\n with 
FileLock(lock_path):\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/utils\/filelock.py\", line 314, in __enter__\r\n self.acquire()\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/utils\/filelock.py\", line 263, in acquire\r\n self._acquire()\r\n File \"\/usr\/local\/lib\/python3.8\/dist-packages\/datasets\/utils\/filelock.py\", line 378, in _acquire\r\n fd = os.open(self._lock_file, open_mode)\r\nOSError: [Errno 30] Read-only file system: 'YOUR_FILE.py.lock'\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.7.0\r\n- Platform: macOS-10.15.7-x86_64-i386-64bit\r\n- Python version: 3.8.8\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2837\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2837\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2836","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2836\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2836\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2836\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2836","id":979230142,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE5NjY5MDUy","number":2836,"title":"Optimize Dataset.filter to only compute the indices to keep","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-25T14:41:22Z","updated_at":"2021-09-14T14:51:53Z","closed_at":"2021-09-13T15:50:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2836","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2836","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2836.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2836.patch","merged_at":"2021-09-13T15:50:21Z"},"body":"Optimize `Dataset.filter` to only compute the indices of the rows to keep, instead of creating a new Arrow table with the rows to keep. 
Creating a new table was an issue because it could take a lot of disk space.\r\n\r\nThis will be useful to process audio datasets for example cc @patrickvonplaten ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2836\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2836\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2835","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2835\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2835\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2835\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2835","id":979209394,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE5NjUxOTE4","number":2835,"title":"Update: timit_asr - make the dataset streamable","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-25T14:22:49Z","updated_at":"2021-09-07T13:15:47Z","closed_at":"2021-09-07T13:15:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2835","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2835","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2835.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2835.patch","merged_at":"2021-09-07T13:15:46Z"},"body":"The TIMIT ASR dataset had two issues that was preventing it from being streamable:\r\n\r\n1. it was missing a call to `open` before `pd.read_csv`\r\n2. 
it was using `os.path.dirname` which is not supported for streaming\r\n\r\nI made the dataset streamable by using `open` to load the CSV, and by adding the support for `os.path.dirname` in dataset scripts to stream data\r\n\r\nYou can now do\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ntimit_asr = load_dataset(\"timit_asr\", streaming=True)\r\nprint(next(iter(timit_asr[\"train\"])))\r\n```\r\nprints:\r\n\r\n```json\r\n{\"file\": \"zip:\/\/data\/TRAIN\/DR4\/MMDM0\/SI681.WAV::https:\/\/data.deepai.org\/timit.zip\",\r\n\"phonetic_detail\": {\"start\": [0, 1960, 2466, 3480, 4000, 5960, 7480, 7880, 9400, 9960, 10680, 13480, 15680, 15880, 16920, 18297, 18882, 19480, 21723, 22516, 24040, 25190, 27080, 28160, 28560, 30120, 31832, 33240, 34640, 35968, 37720],\r\n\"utterance\": [\"h#\", \"w\", \"ix\", \"dcl\", \"s\", \"ah\", \"tcl\", \"ch\", \"ix\", \"n\", \"ae\", \"kcl\", \"t\", \"ix\", \"v\", \"r\", \"ix\", \"f\", \"y\", \"ux\", \"zh\", \"el\", \"bcl\", \"b\", \"iy\", \"y\", \"ux\", \"s\", \"f\", \"el\", \"h#\"],\r\n\"stop\": [1960, 2466, 3480, 4000, 5960, 7480, 7880, 9400, 9960, 10680, 13480, 15680, 15880, 16920, 18297, 18882, 19480, 21723, 22516, 24040, 25190, 27080, 28160, 28560, 30120, 31832, 33240, 34640, 35968, 37720, 39920]},\r\n\"sentence_type\": \"SI\", \"id\": \"SI681\",\r\n\"speaker_id\": \"MMDM0\",\r\n\"dialect_region\": \"DR4\",\r\n\"text\": \"Would such an act of refusal be useful?\",\r\n\"word_detail\": {\r\n \"start\": [1960, 4000, 9400, 10680, 15880, 18297, 27080, 30120],\r\n \"utterance\": [\"would\", \"such\", \"an\", \"act\", \"of\", \"refusal\", \"be\", \"useful\"],\r\n \"stop\": [4000, 9400, 10680, 15880, 18297, 27080, 30120, 37720]\r\n}}\r\n```\r\n\r\ncc @patrickvonplaten @vrindaprabhu","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2835\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2835\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2834","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2834\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2834\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2834\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2834","id":978309749,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE4OTE5NjQ0","number":2834,"title":"Fix IndexError by ignoring empty 
RecordBatch","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-24T17:06:13Z","updated_at":"2021-08-24T17:21:18Z","closed_at":"2021-08-24T17:21:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2834","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2834","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2834.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2834.patch","merged_at":"2021-08-24T17:21:17Z"},"body":"We need to ignore the empty record batches for the interpolation search to work correctly when querying arrow tables\r\n\r\nClose #2833\r\n\r\ncc @SaulLu ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2834\/reactions","total_count":2,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2834\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2833","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2833\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2833\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2833\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2833","id":978296140,"node_id":"MDU6SXNzdWU5NzgyOTYxNDA=","number":2833,"title":"IndexError when accessing first element of a Dataset if first RecordBatch is 
empty","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-08-24T16:49:20Z","updated_at":"2021-08-24T17:21:17Z","closed_at":"2021-08-24T17:21:17Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The computation of the offsets of the underlying Table of a Dataset has some issues if the first RecordBatch is empty.\r\n\r\n```python\r\nfrom datasets import Dataset\r\nimport pyarrow as pa\r\n\r\npa_table = pa.Table.from_pydict({\"a\": [1]})\r\npa_table2 = pa.Table.from_pydict({\"a\": []}, schema=pa_table.schema)\r\nds_table = pa.concat_tables([pa_table2, 
pa_table])\r\n\r\ndataset = Dataset(ds_table)\r\n\r\nprint([len(b) for b in dataset.data._batches])\r\n# [0, 1]\r\n\r\nprint(dataset.data._offsets)\r\n# [0 0 1] (should be [0, 1])\r\n\r\ndataset[0]\r\n```\r\nraises\r\n```python\r\n---------------------------------------------------------------------------\r\nIndexError Traceback (most recent call last)\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/table.py in _interpolation_search(arr, x)\r\n 90 else:\r\n 91 i, j = i, k\r\n---> 92 raise IndexError(f\"Invalid query '{x}' for size {arr[-1] if len(arr) else 'none'}.\")\r\n 93 \r\n 94 \r\n\r\nIndexError: Invalid query '0' for size 1.\r\n```\r\n\r\nThis can be fixed by ignoring empty batches when computing `table._batches` and `table._offsets`\r\n\r\ncc @SaulLu ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2833\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2833\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2832","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2832\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2832\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2832\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2832","id":978012800,"node_id":"MDU6SXNzdWU5NzgwMTI4MDA=","number":2832,"title":"Logging levels not taken into account","user":{"login":"LysandreJik","id":30755778,"node_id":"MDQ6VXNlcjMwNzU1Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30755778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LysandreJik","html_url":"https:\/\/github.com\/LysandreJik","followers_url":"https:\/\/api.github.com\/users\/LysandreJik\/followers","following_url":"https:\/\/api.github.com\/users\/LysandreJik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LysandreJik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LysandreJik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LysandreJik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LysandreJik\/orgs","repos_url":"https:\/\/api.github.com\/users\/LysandreJik\/repos","events_url":"https:\/\/api.github.com\/users\/LysandreJik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LysandreJik\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-24T11:50:41Z","updated_at":"2022-01-19T14:14:45Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nThe `logging` module isn't working as intended relative to the levels to set.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\nfrom datasets import logging\r\n\r\nlogging.set_verbosity_debug()\r\nlogger = 
logging.get_logger()\r\n\r\nlogger.error(\"ERROR\")\r\nlogger.warning(\"WARNING\")\r\nlogger.info(\"INFO\")\r\nlogger.debug(\"DEBUG\"\r\n```\r\n\r\n## Expected results\r\n\r\nI expect all logs to be output since I'm putting a `debug` level.\r\n\r\n## Actual results\r\n\r\nOnly the two first logs are output.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: Linux-5.13.9-arch1-1-x86_64-with-glibc2.33\r\n- Python version: 3.9.6\r\n- PyArrow version: 5.0.0\r\n\r\n## To go further\r\n\r\nThis logging issue appears in `datasets` but not in `transformers`. It happens because there is no handler defined for the logger. When no handler is defined, the `logging` library will output a one-off error to stderr, using a `StderrHandler` with level `WARNING`.\r\n\r\n`transformers` sets a default `StreamHandler` [here](https:\/\/github.com\/huggingface\/transformers\/blob\/5c6eca71a983bae2589eed01e5c04fcf88ba5690\/src\/transformers\/utils\/logging.py#L86)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2832\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2832\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2831","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2831\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2831\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2831\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2831","id":977864600,"node_id":"MDU6SXNzdWU5Nzc4NjQ2MDA=","number":2831,"title":"ArrowInvalid when mapping dataset with missing values","user":{"login":"uniquefine","id":12694730,"node_id":"MDQ6VXNlcjEyNjk0NzMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12694730?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/uniquefine","html_url":"https:\/\/github.com\/uniquefine","followers_url":"https:\/\/api.github.com\/users\/uniquefine\/followers","following_url":"https:\/\/api.github.com\/users\/uniquefine\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/uniquefine\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/uniquefine\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/uniquefine\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/uniquefine\/orgs","repos_url":"https:\/\/api.github.com\/users\/uniquefine\/repos","events_url":"https:\/\/api.github.com\/users\/uniquefine\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/uniquefine\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-24T08:50:42Z","updated_at":"2021-08-31T14:15:34Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI encountered an `ArrowInvalid` when mapping dataset with missing values. 
\r\nHere are the files for a minimal example. The exception is only thrown when the first line in the csv has a missing value (if you move the last line to the top it isn't thrown).\r\n[data_small.csv](https:\/\/github.com\/huggingface\/datasets\/files\/7037838\/data_small.csv)\r\n[data.csv](https:\/\/github.com\/huggingface\/datasets\/files\/7037842\/data.csv)\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndatasets = load_dataset(\"csv\", data_files=['data_small.csv'])\r\n\r\ndatasets = datasets.map(lambda e: {'labels': e['match']},\r\n remove_columns=['id'])\r\n```\r\n\r\n## Expected results\r\nNo error\r\n\r\n## Actual results\r\n```\r\nFile \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Invalid null value\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.5.0\r\n- Platform: Linux-5.11.0-25-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyTorch version (GPU?): 1.7.1+cpu (False)\r\n- Tensorflow version (GPU?): 2.4.1 (False)\r\n- Using GPU in script?: no\r\n- Using distributed or parallel set-up in script?: no\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2831\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2831\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2830","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2830\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2830\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2830\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2830","id":977563947,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE4MjkyMTM2","number":2830,"title":"Add imagefolder 
dataset","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-08-23T23:34:06Z","updated_at":"2021-11-15T17:56:52Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2830","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2830","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2830.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2830.patch","merged_at":null},"body":"A generic imagefolder dataset inspired by `torchvision.datasets.ImageFolder`. \r\n\r\nResolves #2508 \r\n\r\n---\r\n\r\nExample Usage:\r\n\r\n[![Open In Colab](https:\/\/colab.research.google.com\/assets\/colab-badge.svg)](https:\/\/colab.research.google.com\/gist\/nateraw\/954fa8cba4ff806f6147a782fa9efd1a\/imagefolder-official-example.ipynb)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2830\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2830\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2829","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2829\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2829\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2829\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2829","id":977233360,"node_id":"MDU6SXNzdWU5NzcyMzMzNjA=","number":2829,"title":"Optimize streaming from TAR 
archives","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3287858981,"node_id":"MDU6TGFiZWwzMjg3ODU4OTgx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/streaming","name":"streaming","color":"fef2c0","default":false,"description":""}],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-08-23T16:56:40Z","updated_at":"2021-10-12T10:02:29Z","closed_at":null,"author_association":"MEMBER","active_lock_re
ason":null,"draft":null,"pull_request":null,"body":"Hi ! As you know TAR has some constraints for data streaming. While it is optimized for buffering, the files in the TAR archive **need to be streamed in order**. It means that we can't choose which file to stream from, and this notation is to be avoided for TAR archives:\r\n```\r\ntar:\/\/books_large_p1.txt::https:\/\/storage.googleapis.com\/huggingface-nlp\/datasets\/bookcorpus\/bookcorpus.tar.bz2\r\n```\r\nInstead, I suggest we implement `iter_archive` for the `StreamingDownloadManager`.\r\nThe regular `DownloadManager` already has it.\r\n\r\nThen we will have to update the json\/txt\/csv\/etc. loaders to make them use `iter_archive` on TAR archives.\r\n\r\nThat's also what Tensorflow Datasets is doing in this case.\r\nSee this [dataset](https:\/\/github.com\/tensorflow\/datasets\/blob\/93895059c80a9e05805e8f32a2e310f66a23fc98\/tensorflow_datasets\/image_classification\/flowers.py) for example.\r\n\r\nTherefore instead of doing\r\n```python\r\nuncompressed = dl_manager.extract(tar_archive)\r\nfilename = \"books_large_p1.txt\"\r\nwith open(os.path.join(uncompressed, filename)) as f:\r\n for line in f:\r\n ...\r\n```\r\nwe'll do\r\n```python\r\nfor filename, f in dl_manager.iter_archive(tar_archive):\r\n for line in f:\r\n ...\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2829\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2829\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2828","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2828\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2828\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2828\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2828","id":977181517,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE3OTYwODg3","number":2828,"title":"Add code-mixed Kannada Hope speech 
dataset","user":{"login":"adeepH","id":46108405,"node_id":"MDQ6VXNlcjQ2MTA4NDA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46108405?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adeepH","html_url":"https:\/\/github.com\/adeepH","followers_url":"https:\/\/api.github.com\/users\/adeepH\/followers","following_url":"https:\/\/api.github.com\/users\/adeepH\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adeepH\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adeepH\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adeepH\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adeepH\/orgs","repos_url":"https:\/\/api.github.com\/users\/adeepH\/repos","events_url":"https:\/\/api.github.com\/users\/adeepH\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adeepH\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-23T15:55:09Z","updated_at":"2021-10-01T17:21:03Z","closed_at":"2021-10-01T17:21:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2828","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2828","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2828.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2828.patch","merged_at":null},"body":"## Adding a Dataset\r\n- **Name:** *KanHope*\r\n- **Description:** *A code-mixed English-Kannada dataset for Hope speech detection*\r\n- **Paper:** *https:\/\/arxiv.org\/abs\/2108.04616* \r\n- **Data:** *https:\/\/github.com\/adeepH\/KanHope\/tree\/main\/dataset*\r\n- **Motivation:** *The dataset is amongst the very few resources available for code-mixed low-resourced Dravidian languages of India*","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2828\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2828\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2827","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2827\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2827\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2827\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2827","id":976976552,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE3Nzg3MjEw","number":2827,"title":"add a text classification 
dataset","user":{"login":"adeepH","id":46108405,"node_id":"MDQ6VXNlcjQ2MTA4NDA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46108405?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adeepH","html_url":"https:\/\/github.com\/adeepH","followers_url":"https:\/\/api.github.com\/users\/adeepH\/followers","following_url":"https:\/\/api.github.com\/users\/adeepH\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adeepH\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adeepH\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adeepH\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adeepH\/orgs","repos_url":"https:\/\/api.github.com\/users\/adeepH\/repos","events_url":"https:\/\/api.github.com\/users\/adeepH\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adeepH\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-23T12:24:41Z","updated_at":"2021-08-23T15:51:18Z","closed_at":"2021-08-23T15:51:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2827","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2827","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2827.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2827.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2827\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2827\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2826","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2826\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2826\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2826\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2826","id":976974254,"node_id":"MDU6SXNzdWU5NzY5NzQyNTQ=","number":2826,"title":"Add a Text Classification dataset: 
KanHope","user":{"login":"adeepH","id":46108405,"node_id":"MDQ6VXNlcjQ2MTA4NDA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46108405?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adeepH","html_url":"https:\/\/github.com\/adeepH","followers_url":"https:\/\/api.github.com\/users\/adeepH\/followers","following_url":"https:\/\/api.github.com\/users\/adeepH\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adeepH\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adeepH\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adeepH\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adeepH\/orgs","repos_url":"https:\/\/api.github.com\/users\/adeepH\/repos","events_url":"https:\/\/api.github.com\/users\/adeepH\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adeepH\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-23T12:21:58Z","updated_at":"2021-10-01T18:06:59Z","closed_at":"2021-10-01T18:06:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *KanHope*\r\n- **Description:** *A code-mixed English-Kannada dataset for Hope speech detection*\r\n- **Paper:** *https:\/\/arxiv.org\/abs\/2108.04616* (I am the author of the paper}\r\n- **Author:** *[AdeepH](https:\/\/github.com\/adeepH)*\r\n- **Data:** *https:\/\/github.com\/adeepH\/KanHope\/tree\/main\/dataset*\r\n- **Motivation:** *The dataset is amongst the very few resources available for code-mixed Dravidian languages*\r\n\r\n- I tried following the steps as per the instructions. However, could not resolve an error. 
Any help would be appreciated.\r\n\r\n- The dataset card and the scripts for the dataset *https:\/\/github.com\/adeepH\/datasets\/tree\/multilingual-hope-speech\/datasets\/mhs_eval*\r\n\r\n```\r\nUsing custom data configuration default\r\nDownloading and preparing dataset bn_hate_speech\/default (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/root\/.cache\/huggingface\/datasets\/bn_hate_speech\/default\/0.0.0\/5f417ddc89777278abd29988f909f39495f0ec802090f7d8fa63b5bffb121762...\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n in ()\r\n 1 from datasets import load_dataset\r\n 2 \r\n----> 3 data = load_dataset('\/content\/bn')\r\n\r\n9 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, task, streaming, **config_kwargs)\r\n 850 ignore_verifications=ignore_verifications,\r\n 851 try_from_hf_gcs=try_from_hf_gcs,\r\n--> 852 use_auth_token=use_auth_token,\r\n 853 )\r\n 854 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n 614 if not downloaded_from_gcs:\r\n 615 self._download_and_prepare(\r\n--> 616 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 617 )\r\n 618 # Sync info\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 691 try:\r\n 692 # Prepare split will record examples associated to the split\r\n--> 693 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n 694 except OSError as e:\r\n 695 raise OSError(\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in _prepare_split(self, split_generator)\r\n 1107 disable=bool(logging.get_verbosity() == logging.NOTSET),\r\n 1108 ):\r\n-> 1109 example = self.info.features.encode_example(record)\r\n 1110 writer.write(example, key)\r\n 1111 finally:\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/features.py in encode_example(self, example)\r\n 1015 \"\"\"\r\n 1016 example = cast_to_python_objects(example)\r\n-> 1017 return encode_nested_example(self, example)\r\n 1018 \r\n 1019 def encode_batch(self, batch):\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/features.py in encode_nested_example(schema, obj)\r\n 863 if isinstance(schema, dict):\r\n 864 return {\r\n--> 865 k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n 866 }\r\n 867 elif isinstance(schema, (list, tuple)):\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/features.py in (.0)\r\n 863 if isinstance(schema, dict):\r\n 864 return {\r\n--> 865 k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n 866 }\r\n 867 elif isinstance(schema, (list, tuple)):\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/features.py in encode_nested_example(schema, obj)\r\n 890 # ClassLabel will convert from string to int, TranslationVariableLanguages does some checks\r\n 891 elif isinstance(schema, (ClassLabel, TranslationVariableLanguages, 
Value, _ArrayXD)):\r\n--> 892 return schema.encode_example(obj)\r\n 893 # Other object should be directly convertible to a native Arrow type (like Translation and Translation)\r\n 894 return obj\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/features.py in encode_example(self, example_data)\r\n 665 # If a string is given, convert to associated integer\r\n 666 if isinstance(example_data, str):\r\n--> 667 example_data = self.str2int(example_data)\r\n 668 \r\n 669 # Allowing -1 to mean no label.\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/features.py in str2int(self, values)\r\n 623 if value not in self._str2int:\r\n 624 value = str(value).strip()\r\n--> 625 output.append(self._str2int[str(value)])\r\n 626 else:\r\n 627 # No names provided, try to integerize\r\n\r\nKeyError: ' '\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2826\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2826\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2825","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2825\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2825\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2825\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2825","id":976584926,"node_id":"MDU6SXNzdWU5NzY1ODQ5MjY=","number":2825,"title":"The datasets.map function does not load cached dataset after moving python script","user":{"login":"hobbitlzy","id":35392624,"node_id":"MDQ6VXNlcjM1MzkyNjI0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35392624?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hobbitlzy","html_url":"https:\/\/github.com\/hobbitlzy","followers_url":"https:\/\/api.github.com\/users\/hobbitlzy\/followers","following_url":"https:\/\/api.github.com\/users\/hobbitlzy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hobbitlzy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hobbitlzy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hobbitlzy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hobbitlzy\/orgs","repos_url":"https:\/\/api.github.com\/users\/hobbitlzy\/repos","events_url":"https:\/\/api.github.com\/users\/hobbitlzy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hobbitlzy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-08-23T03:23:37Z","updated_at":"2021-08-31T13:14:41Z","closed_at":"2021-08-31T13:13:36Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe datasets.map function caches the processed data to a certain directory. When the map function is called another time with totally the same parameters, the cached data are supposed to be reloaded instead of re-processing. However, it doesn't reuse cached data sometimes. 
I use the common data processing in different tasks, the datasets are processed again, the only difference is that I run them in different files.\r\n\r\n## Steps to reproduce the bug\r\nJust run the following codes in different .py files.\r\n```python\r\nif __name__ == '__main__':\r\n from datasets import load_dataset\r\n from transformers import AutoTokenizer\r\n raw_datasets = load_dataset(\"wikitext\", \"wikitext-2-raw-v1\")\r\n\r\n tokenizer = AutoTokenizer.from_pretrained(\"bert-base-uncased\")\r\n\r\n\r\n def tokenize_function(examples):\r\n return tokenizer(examples[\"text\"], padding=\"max_length\", truncation=True)\r\n\r\n\r\n tokenized_datasets = raw_datasets.map(tokenize_function, batched=True)\r\n```\r\n\r\n## Expected results\r\nThe map function should reload data in the second or any later runs.\r\n\r\n## Actual results\r\nThe processing happens in each run.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: linux\r\n- Python version: 3.7.6\r\n- PyArrow version: 3.0.0\r\n\r\nThis is the first time I report a bug. If there is any problem or confusing description, please let me know \ud83d\ude04.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2825\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2825\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2824","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2824\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2824\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2824\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2824","id":976394721,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE3MzIyMzY5","number":2824,"title":"Fix defaults in cache_dir docstring in 
load.py","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-22T14:48:37Z","updated_at":"2021-08-26T13:23:32Z","closed_at":"2021-08-26T11:55:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2824","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2824","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2824.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2824.patch","merged_at":"2021-08-26T11:55:16Z"},"body":"Fix defaults in the `cache_dir` docstring.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2824\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2824\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2823","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2823\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2823\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2823\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2823","id":976135355,"node_id":"MDU6SXNzdWU5NzYxMzUzNTU=","number":2823,"title":"HF_DATASETS_CACHE variable in 
Windows","user":{"login":"rp2839","id":8453798,"node_id":"MDQ6VXNlcjg0NTM3OTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8453798?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rp2839","html_url":"https:\/\/github.com\/rp2839","followers_url":"https:\/\/api.github.com\/users\/rp2839\/followers","following_url":"https:\/\/api.github.com\/users\/rp2839\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rp2839\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rp2839\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rp2839\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rp2839\/orgs","repos_url":"https:\/\/api.github.com\/users\/rp2839\/repos","events_url":"https:\/\/api.github.com\/users\/rp2839\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rp2839\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-21T13:17:44Z","updated_at":"2021-08-21T13:20:11Z","closed_at":"2021-08-21T13:20:11Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I can't seem to use a custom Cache directory in Windows. I have tried:\r\n\r\nset HF_DATASETS_CACHE = \"C:\\Datasets\"\r\nset HF_DATASETS_CACHE = \"C:\/Datasets\"\r\nset HF_DATASETS_CACHE = \"C:\\\\Datasets\"\r\nset HF_DATASETS_CACHE = \"r'C:\\Datasets'\"\r\nset HF_DATASETS_CACHE = \"\\Datasets\"\r\nset HF_DATASETS_CACHE = \"\/Datasets\"\r\n\r\nIn each instance I get the \"[WinError 123] The filename, directory name, or volume label syntax is incorrect\" error when attempting to load a dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2823\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2823\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2822","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2822\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2822\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2822\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2822","id":975744463,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE2ODUxMTAy","number":2822,"title":"Add url prefix convention for many compression 
formats","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-08-20T16:11:23Z","updated_at":"2021-08-23T15:59:16Z","closed_at":"2021-08-23T15:59:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2822","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2822","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2822.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2822.patch","merged_at":"2021-08-23T15:59:14Z"},"body":"## Intro\r\n\r\nWhen doing dataset streaming, the uncompression of compressed files is done on the fly using `fsspec`.\r\n\r\nIn particular, the download manager method `download_and_extract` doesn't return a path to the local download and extracted file, but instead a chained URL so that the uncompression can be done when the file is opened. A few examples of chained URLS:\r\n- `gz:\/\/file.txt::https:\/\/foo.bar\/file.txt.gz`\r\n- `bz2:\/\/file.txt::https:\/\/foo.bar\/file.txt.bz2`\r\n- `zip:\/\/::https:\/\/foo.bar\/archive.zip`\r\n- `tar:\/\/::https:\/\/foo.bar\/archive.tar.gz` (the TAR uncompression includes gz, bz2 etc. uncompression in `fsspec`)\r\n\r\nThis syntax is highly inspired by the `fsspec` URL chaining syntax from https:\/\/filesystem-spec.readthedocs.io\/en\/latest\/features.html#url-chaining\r\n\r\nThis url prefixing allows `open` to know what kind of uncompression to do in a dataset script when doing\r\n```python\r\ndef _generate_examples(self, urlpath):\r\n with open(urlpath) as f:\r\n ....\r\n```\r\n\r\n## What it changes\r\n\r\nThis changes the previous behavior from https:\/\/github.com\/huggingface\/datasets\/pull\/2786 , in which `open` was trying to infer the compression automatically. Infering the compression made it impossible to know whether the user wanted `open` to return compressed data (as the default behavior of the buitin open), or the uncompressed data. 
By adding uncompression prefixes to the URL, `open` know directly if it has to uncompress or not, and also which protocol to use.\r\n\r\n## Additional notes\r\n\r\nThis PR should close https:\/\/github.com\/huggingface\/datasets\/issues\/2813\r\n\r\nIt should also close this PR https:\/\/github.com\/huggingface\/datasets\/pull\/2811 since the oscar dataset script won't try to uncompress twice anymore\r\n\r\nNote that I had to temporarily remove the support for passing tar and zip files to `data_files` for streaming to make it work, since it makes it ambiguous whether a zip file passed as `data_files` should be uncompressed or not. IMO we can make it work again by changing the syntax to make the glob explicit:\r\n```python\r\nload_dataset(\"json\", data_files=\"zip:\/\/*.jsonl::https:\/\/foo.bar\/archive.zip\")\r\n```\r\nThis is the exact same convention as fsspec and it removes all ambiguities\r\n\r\ncc @albertvillanova @lewtun ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2822\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2822\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2821","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2821\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2821\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2821\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2821","id":975556032,"node_id":"MDU6SXNzdWU5NzU1NTYwMzI=","number":2821,"title":"Cannot load linnaeus dataset","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-20T12:15:15Z","updated_at":"2021-08-31T13:13:02Z","closed_at":"2021-08-31T13:12:09Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe [linnaeus](https:\/\/huggingface.co\/datasets\/linnaeus) dataset cannot be loaded. 
To reproduce:\r\n```\r\nfrom datasets import load_dataset\r\n\r\ndatasets = load_dataset(\"linnaeus\")\r\n```\r\nThis results in:\r\n```\r\nDownloading and preparing dataset linnaeus\/linnaeus (download: 17.36 MiB, generated: 8.74 MiB, post-processed: Unknown size, total: 26.10 MiB) to \/root\/.cache\/huggingface\/datasets\/linnaeus\/linnaeus\/1.0.0\/2ff05dbc256108233262f596e09e322dbc3db067202de14286913607cd9cb704...\r\n---------------------------------------------------------------------------\r\nConnectionError Traceback (most recent call last)\r\n in ()\r\n 1 from datasets import load_dataset\r\n 2 \r\n----> 3 datasets = load_dataset(\"linnaeus\")\r\n\r\n11 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag, max_retries, use_auth_token)\r\n 603 raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\n 604 _raise_if_offline_mode_is_enabled(f\"Tried to reach {url}\")\r\n--> 605 raise ConnectionError(\"Couldn't reach {}\".format(url))\r\n 606 \r\n 607 # Try a second time\r\n\r\nConnectionError: Couldn't reach https:\/\/drive.google.com\/u\/0\/uc?id=1OletxmPYNkz2ltOr9pyT0b0iBtUWxslh&export=download\/\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2821\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2821\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2820","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2820\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2820\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2820\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2820","id":975210712,"node_id":"MDU6SXNzdWU5NzUyMTA3MTI=","number":2820,"title":"Downloading \u201creddit\u201d dataset keeps timing out.","user":{"login":"smeyerhot","id":43877130,"node_id":"MDQ6VXNlcjQzODc3MTMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43877130?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/smeyerhot","html_url":"https:\/\/github.com\/smeyerhot","followers_url":"https:\/\/api.github.com\/users\/smeyerhot\/followers","following_url":"https:\/\/api.github.com\/users\/smeyerhot\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/smeyerhot\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/smeyerhot\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/smeyerhot\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/smeyerhot\/orgs","repos_url":"https:\/\/api.github.com\/users\/smeyerhot\/repos","events_url":"https:\/\/api.github.com\/users\/smeyerhot\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/smeyerhot\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-08-20T02:52:36Z","updated_at":"2021-09-08T14:52:02Z","closed_at":"2021-09-08T14:52:02Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\nEverytime I try and download the reddit dataset it times out before finishing and I have to try again.\r\n\r\nThere is some timeout error that I will post once it happens again.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"reddit\", ignore_verifications=True, cache_dir=\"\/Volumes\/My Passport for Mac\/og-chat-data\")\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\nI would expect the download to finish, or at least provide a parameter to extend the read timeout window.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\nShown below in error message.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: macOS \r\n- Python version: 3.9.6 (conda env)\r\n- PyArrow version: N\/A\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2820\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2820\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2819","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2819\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2819\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2819\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2819","id":974683155,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE1OTUyMjE1","number":2819,"title":"Added XL-Sum 
dataset","user":{"login":"abhik1505040","id":49608995,"node_id":"MDQ6VXNlcjQ5NjA4OTk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49608995?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhik1505040","html_url":"https:\/\/github.com\/abhik1505040","followers_url":"https:\/\/api.github.com\/users\/abhik1505040\/followers","following_url":"https:\/\/api.github.com\/users\/abhik1505040\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhik1505040\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhik1505040\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhik1505040\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhik1505040\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhik1505040\/repos","events_url":"https:\/\/api.github.com\/users\/abhik1505040\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhik1505040\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-08-19T13:47:45Z","updated_at":"2021-09-29T08:13:44Z","closed_at":"2021-09-23T17:49:05Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2819","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2819","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2819.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2819.patch","merged_at":null},"body":"Added XL-Sum dataset published in ACL-IJCNLP 2021. (https:\/\/aclanthology.org\/2021.findings-acl.413\/). The default timeout values in `src\/datasets\/utils\/file_utls.py` were increased to enable downloading from the original google drive links.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2819\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2819\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2818","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2818\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2818\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2818\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2818","id":974552009,"node_id":"MDU6SXNzdWU5NzQ1NTIwMDk=","number":2818,"title":"cannot load data from my loacal 
path","user":{"login":"yang-collect","id":46920280,"node_id":"MDQ6VXNlcjQ2OTIwMjgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46920280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yang-collect","html_url":"https:\/\/github.com\/yang-collect","followers_url":"https:\/\/api.github.com\/users\/yang-collect\/followers","following_url":"https:\/\/api.github.com\/users\/yang-collect\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yang-collect\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yang-collect\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yang-collect\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yang-collect\/orgs","repos_url":"https:\/\/api.github.com\/users\/yang-collect\/repos","events_url":"https:\/\/api.github.com\/users\/yang-collect\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yang-collect\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-19T11:13:30Z","updated_at":"2021-08-31T08:46:16Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI just want to directly load data from my local path,but find a bug.And I compare it with pandas to provide my local path is real.\r\n\r\nhere is my code\r\n```python3\r\n# print my local path\r\nprint(config.train_path)\r\n# read data and print data length\r\ntarin=pd.read_csv(config.train_path)\r\nprint(len(tarin))\r\n\r\n# loading data by load_dataset \r\ndata = load_dataset('csv',data_files=config.train_path)\r\n\r\nprint(len(data))\r\n```\r\n\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nC:\\Users\\wie\\Documents\\\u9879\u76ee\\\u6587\u672c\u5206\u7c7b\\data\\train.csv\r\n7613\r\nTraceback (most recent call last):\r\n File \"c:\/Users\/wie\/Documents\/\u9879\u76ee\/\u6587\u672c\u5206\u7c7b\/lib\/DataPrecess.py\", line 17, in \r\n data = load_dataset('csv',data_files=config.train_path)\r\n File \"C:\\Users\\wie\\Miniconda3\\lib\\site-packages\\datasets\\load.py\", line 830, in load_dataset\r\n **config_kwargs,\r\n File \"C:\\Users\\wie\\Miniconda3\\lib\\site-packages\\datasets\\load.py\", line 710, in load_dataset_builder\r\n **config_kwargs,\r\n File \"C:\\Users\\wie\\Miniconda3\\lib\\site-packages\\datasets\\builder.py\", line 271, in __init__\r\n **config_kwargs,\r\n File \"C:\\Users\\wie\\Miniconda3\\lib\\site-packages\\datasets\\builder.py\", line 386, in _create_builder_config\r\n config_kwargs, custom_features=custom_features, use_auth_token=self.use_auth_token\r\n File \"C:\\Users\\wie\\Miniconda3\\lib\\site-packages\\datasets\\builder.py\", line 156, in create_config_id\r\n raise ValueError(\"Please provide a valid `data_files` in `DatasetBuilder`\")\r\nValueError: Please provide a valid `data_files` in `DatasetBuilder`\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: win10\r\n- Python version: 3.7.9\r\n- PyArrow version: 
5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2818\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2818\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2817","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2817\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2817\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2817\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2817","id":974486051,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE1NzgzMDQ3","number":2817,"title":"Rename The Pile subsets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-19T09:56:22Z","updated_at":"2021-08-23T16:24:10Z","closed_at":"2021-08-23T16:24:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2817","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2817","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2817.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2817.patch","merged_at":"2021-08-23T16:24:09Z"},"body":"After discussing with @yjernite we think it's better to have the subsets of The Pile explicitly have \"the_pile\" in their names.\r\n\r\nI'm doing the changes for the subsets that @richarddwang added:\r\n- [x] books3 -> the_pile_books3 https:\/\/github.com\/huggingface\/datasets\/pull\/2801\r\n- [x] stack_exchange -> the_pile_stack_exchange https:\/\/github.com\/huggingface\/datasets\/pull\/2803\r\n- [x] openwebtext2 -> the_pile_openwebtext2 https:\/\/github.com\/huggingface\/datasets\/pull\/2802\r\n\r\nFor consistency we should also rename `bookcorpusopen` to `the_pile_bookcorpus` IMO, but let me know what you think.\r\n(we can just add a deprecation message to `bookcorpusopen` for now and add 
`the_pile_bookcorpus`)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2817\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2817\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2816","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2816\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2816\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2816\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2816","id":974031404,"node_id":"MDU6SXNzdWU5NzQwMzE0MDQ=","number":2816,"title":"Add Mostly Basic Python Problems Dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-18T20:28:39Z","updated_at":"2021-09-10T08:04:20Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Mostly Basic Python Problems Dataset\r\n- **Description:** The benchmark consists of around 1,000 crowd-sourced Python programming problems, designed to be solvable by entry level programmers, covering programming fundamentals, standard library functionality, and so on. 
Each problem consists of a task description, code solution and 3 automated test cases.\r\n- **Paper:** *link to the dataset paper if available*\r\n- **Data:** https:\/\/github.com\/google-research\/google-research\/tree\/master\/mbpp\r\n- **Motivation:** Simple, small dataset related to coding problems.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2816\/reactions","total_count":3,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2816\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2815","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2815\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2815\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2815\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2815","id":973862024,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE1MjUxNDQ5","number":2815,"title":"Tiny typo fixes of \"fo\" -> \"of\"","user":{"login":"aronszanto","id":9934829,"node_id":"MDQ6VXNlcjk5MzQ4Mjk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9934829?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aronszanto","html_url":"https:\/\/github.com\/aronszanto","followers_url":"https:\/\/api.github.com\/users\/aronszanto\/followers","following_url":"https:\/\/api.github.com\/users\/aronszanto\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aronszanto\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aronszanto\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aronszanto\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aronszanto\/orgs","repos_url":"https:\/\/api.github.com\/users\/aronszanto\/repos","events_url":"https:\/\/api.github.com\/users\/aronszanto\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aronszanto\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-18T16:36:11Z","updated_at":"2021-08-19T08:03:02Z","closed_at":"2021-08-19T08:03:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2815","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2815","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2815.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2815.patch","merged_at":"2021-08-19T08:03:02Z"},"body":"Noticed a few of these when reading docs- feel free to ignore the PR and just fix on some main contributor branch if more helpful. Thanks for the great library! 
:)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2815\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2815\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2814","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2814\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2814\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2814\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2814","id":973632645,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE1MDUwODc4","number":2814,"title":"Bump tqdm version","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-18T12:51:29Z","updated_at":"2021-08-18T13:44:11Z","closed_at":"2021-08-18T13:39:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2814","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2814","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2814.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2814.patch","merged_at":"2021-08-18T13:39:49Z"},"body":"The recently released tqdm 4.62.1 includes a fix for PermissionError on Windows (submitted by me in https:\/\/github.com\/tqdm\/tqdm\/pull\/1207), which means we can remove expensive `gc.collect` calls by bumping tqdm to that version. 
This PR does exactly that and, additionally, fixes a `disable_tqdm` definition that would previously, if used, raise a PermissionError on Windows.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2814\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2814\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2813","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2813\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2813\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2813\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2813","id":973470580,"node_id":"MDU6SXNzdWU5NzM0NzA1ODA=","number":2813,"title":"Remove compression from xopen","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-18T09:35:59Z","updated_at":"2021-08-23T15:59:14Z","closed_at":"2021-08-23T15:59:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"We implemented support for streaming with 2 requirements:\r\n- transparent use for the end user: just needs to pass the parameter `streaming=True`\r\n- no additional work for the contributors: previous loading scripts should also work in streaming mode with no (or minor) changes; and new loading scripts should not involve additional code to support streaming\r\n\r\nIn order to fulfill these requirements, streaming implementation patched some Python functions:\r\n- the `open(urlpath)` function was patched with `fsspec.open(urlpath)`\r\n- the `os.path.join(urlpath, *others)` function was patched in order to add to `urlpath` hops (`::`) and extractor protocols (`zip:\/\/`), which are required by `fsspec.open`\r\n\r\nRecently, we implemented support for streaming all archive+compression formats: zip, tar, gz, bz2, lz4, xz, zst; 
tar.gz, tar.bz2,...\r\nUnder the hood, the implementation:\r\n- passes an additional parameter `compression` to `fsspec.open`, so that it performs the decompression on the fly: `fsspec.open(urlpath, compression=...)`\r\n\r\nSome concerns have been raised about passing the parameter `compression` to `fsspec.open`:\r\n- https:\/\/github.com\/huggingface\/datasets\/pull\/2786#discussion_r689550254\r\n- #2811 \r\n\r\nThe main argument is that if `open` decompresses the file and afterwards we call `gzip.open` on it, that will raise an error in `oscar` dataset:\r\n```python\r\ngzip.open(open(urlpath\r\n```\r\nWhile this is true:\r\n- it is not natural\/usual to call `open` inside `gzip.open` (never seen this before)\r\n- indeed, this was recently (2 months ago) coded that way in `datasets` in order to allow streaming support (with previous implementation of streaming)\r\n\r\nIn this particular case, there is a natural fix solution: #2811:\r\n- Revert the `open` inside the `gzip.open` (change done 2 months ago): `gzip.open(open(urlpath` => `gzip.open(urlpath`\r\n- Patch `gzip.open(urlpath` with `fsspec.open(urlpath, compression=\"gzip\"` \r\n\r\nAre there other issues apart from this?\r\n\r\nNote that there is an issue just because the open inside of the gzip.open. There is no issue in the other cases where datasets loading scripts use just\r\n- `gzip.open` \r\n- `open` (after having called dl_manager.download_and_extract)\r\n\r\nTODO:\r\n- [ ] Is this really an issue? Please enumerate the `datasets` loading scripts where this is problematic.\r\n - For the moment, there are only 3 datasets where we have an `open` inside a `gzip.open`:\r\n - oscar (since 23 June), mc4 (since 2 July) and c4 (since 2 July)\r\n - In the 3 datasets, the only reason to put an open inside a gzip.open was indeed to force supporting streaming\r\n- [ ] If this is indeed an issue, which are the possible alternatives? 
Pros\/cons?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2813\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2813\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2812","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2812\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2812\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2812\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2812","id":972936889,"node_id":"MDU6SXNzdWU5NzI5MzY4ODk=","number":2812,"title":"arXiv Dataset verification problem","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-17T18:01:48Z","updated_at":"2022-01-19T14:15:35Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n`dataset_infos.json` for `arxiv_dataset` contains a fixed number of training examples, however the data (downloaded from an external source) is updated every week with additional examples.\r\nTherefore, loading the dataset without `ignore_verifications=True` results in a verification error.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2812\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2812\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2811","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2811\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2811\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2811\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2811","id":972522480,"node_id":"MDExOlB1bGxSZXF1ZXN0NzE0MTAzNDIy","number":2811,"title":"Fix stream oscar","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-08-17T10:10:59Z","updated_at":"2021-08-26T10:26:15Z","closed_at":"2021-08-26T10:26:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2811","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2811","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2811.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2811.patch","merged_at":null},"body":"Previously, an additional `open` was added to oscar to make it stream-compatible: 587bbb94e891b22863b312b99696e32708c379f4.\r\n\r\nThis was argued that might be problematic: https:\/\/github.com\/huggingface\/datasets\/pull\/2786#discussion_r690045921\r\n\r\nThis PR:\r\n- removes that additional `open`\r\n- patches `gzip.open` with `xopen` + `compression=\"gzip\"`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2811\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2811\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2810","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2810\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2810\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2810\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2810","id":972040022,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEzNjkzMTI1","number":2810,"title":"Add WIT Dataset","user":{"login":"hassiahk","id":13920778,"node_id":"MDQ6VXNlcjEzOTIwNzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13920778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hassiahk","html_url":"https:\/\/github.com\/hassiahk","followers_url":"https:\/\/api.github.com\/users\/hassiahk\/followers","following_url":"https:\/\/api.github.com\/users\/hassiahk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hassiahk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hassiahk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hassiahk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hassiahk\/orgs","repos_url":"https:\/\/api.github.com\/users\/hassiahk\/repos","events_url":"https:\/\/api.github.com\/users\/hassiahk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hassiahk\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-16T19:34:09Z","updated_at":"2021-08-17T17:14:18Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2810","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2810","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2810.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2810.patch","merged_at":null},"body":"Adds Google's [WIT](https:\/\/github.com\/google-research-datasets\/wit) dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2810\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2810\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2809","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2809\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2809\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2809\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2809","id":971902613,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEzNTc2Njcz","number":2809,"title":"Add Beans 
Dataset","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-16T16:22:33Z","updated_at":"2021-08-26T11:42:27Z","closed_at":"2021-08-26T11:42:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2809","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2809","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2809.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2809.patch","merged_at":"2021-08-26T11:42:27Z"},"body":"Adds the [beans](https:\/\/github.com\/AI-Lab-Makerere\/ibean\/) image classification dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2809\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2809\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2808","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2808\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2808\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2808\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2808","id":971882320,"node_id":"MDU6SXNzdWU5NzE4ODIzMjA=","number":2808,"title":"Enable streaming for Wikipedia 
corpora","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-16T15:59:12Z","updated_at":"2021-08-16T15:59:12Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nSeveral of the [Wikipedia corpora](https:\/\/huggingface.co\/datasets?search=wiki) on the Hub involve quite large files that would be a good candidate for streaming. Currently it is not possible to stream these corpora:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n# Throws ValueError: Builder wikipedia is not streamable.\r\nwiki_dataset_streamed = load_dataset(\"wikipedia\", \"20200501.en\", split=\"train\", streaming=True)\r\n```\r\n\r\nGiven that these corpora are derived from Wikipedia dumps in XML format which are then processed with Apache Beam, I am not sure whether streaming is possible in principle. 
The goal of this issue is to discuss whether this feature even makes sense :)\r\n\r\n**Describe the solution you'd like**\r\nIt would be nice to be able to stream Wikipedia corpora from the Hub with something like\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nwiki_dataset_streamed = load_dataset(\"wikipedia\", \"20200501.en\", split=\"train\", streaming=True)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2808\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2808\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2807","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2807\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2807\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2807\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2807","id":971849863,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEzNTMxNjIw","number":2807,"title":"Add cats_vs_dogs dataset","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-16T15:21:11Z","updated_at":"2021-08-30T16:35:25Z","closed_at":"2021-08-30T16:35:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2807","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2807","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2807.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2807.patch","merged_at":"2021-08-30T16:35:24Z"},"body":"Adds Microsoft's [Cats vs. 
Dogs](https:\/\/www.microsoft.com\/en-us\/download\/details.aspx?id=54765) dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2807\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2807\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2806","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2806\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2806\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2806\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2806","id":971625449,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEzMzM5NDUw","number":2806,"title":"Fix streaming tar files from canonical datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-08-16T11:10:28Z","updated_at":"2021-10-13T09:04:03Z","closed_at":"2021-10-13T09:04:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2806","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2806","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2806.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2806.patch","merged_at":null},"body":"Previous PR #2800 implemented support to stream remote tar files when passing the parameter `data_files`: they required a glob string `\"*\"`.\r\n\r\nHowever, this glob string creates an error when streaming canonical datasets (with a `join` after the `open`).\r\n\r\nThis PR fixes this issue and allows streaming tar files both from:\r\n- canonical datasets scripts and\r\n- data files.\r\n\r\nThis PR also adds support for compressed tar files: `.tar.gz`, `.tar.bz2`,...\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2806\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2806\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2805","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2805\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2805\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2805\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2805","id":971436456,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEzMTc3MTI4","number":2805,"title":"Fix streaming zip files from canonical datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-16T07:11:40Z","updated_at":"2021-08-16T10:34:00Z","closed_at":"2021-08-16T10:34:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2805","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2805","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2805.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2805.patch","merged_at":"2021-08-16T10:34:00Z"},"body":"Previous PR #2798 fixed streaming remote zip files when passing the parameter `data_files`.\r\n\r\nHowever, that broke streaming zip files used in canonical `datasets` scripts, which normally have a subsequent `join()` (patched with `xjoin()`) after the `StreamingDownloadManager.download_and_extract()` is called.\r\n\r\nThis PR fixes this issue and allows streaming zip files both from:\r\n- canonical datasets scripts and\r\n- data files.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2805\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2805\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2804","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2804\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2804\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2804\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2804","id":971353437,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEzMTA2NTMw","number":2804,"title":"Add Food-101","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-16T04:26:15Z","updated_at":"2021-08-20T14:31:33Z","closed_at":"2021-08-19T12:48:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2804","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2804","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2804.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2804.patch","merged_at":"2021-08-19T12:48:06Z"},"body":"Adds image classification dataset [Food-101](https:\/\/data.vision.ee.ethz.ch\/cvl\/datasets_extra\/food-101\/).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2804\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2804\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2803","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2803\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2803\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2803\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2803","id":970858928,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEyNzQxODMz","number":2803,"title":"add stack 
exchange","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-14T08:11:02Z","updated_at":"2021-08-19T10:07:33Z","closed_at":"2021-08-19T08:07:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2803","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2803","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2803.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2803.patch","merged_at":"2021-08-19T08:07:38Z"},"body":"stack exchange is part of EleutherAI\/The Pile, but AFAIK, The Pile dataset blend all sub datasets together thus we are not able to use just one of its sub dataset from The Pile data. So I create an independent dataset using The Pile preliminary components.\r\n\r\nI also change default `timeout` to 100 seconds instead of 10 seconds, otherwise I keep getting read time out when downloading source data of stack exchange and cc100 dataset.\r\n\r\nWhen I was creating dataset card. I found there is room for creating \/ editing dataset card. I've made it an issue. #2797\r\n\r\nAlso I am wondering whether the import of The Pile dataset is actively undertaken (because I may need it recently)? 
#1675","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2803\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2803\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2802","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2802\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2802\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2802\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2802","id":970848302,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEyNzM0MTc3","number":2802,"title":"add openwebtext2","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-08-14T07:09:03Z","updated_at":"2021-08-23T14:06:14Z","closed_at":"2021-08-23T14:06:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2802","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2802","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2802.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2802.patch","merged_at":"2021-08-23T14:06:14Z"},"body":"openwebtext2 is part of EleutherAI\/The Pile, but AFAIK, The Pile dataset blend all sub datasets together thus we are not able to use just one of its sub dataset from The Pile data. So I create an independent dataset using The Pile preliminary components.\r\n\r\nWhen I was creating dataset card. I found there is room for creating \/ editing dataset card. I've made it an issue. #2797\r\n\r\nAlso I am wondering whether the import of The Pile dataset is actively undertaken (because I may need it recently)? 
#1675","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2802\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2802\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2801","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2801\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2801\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2801\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2801","id":970844617,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEyNzMwODEz","number":2801,"title":"add books3","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-08-14T07:04:25Z","updated_at":"2021-08-19T16:43:09Z","closed_at":"2021-08-18T15:36:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2801","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2801","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2801.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2801.patch","merged_at":"2021-08-18T15:36:59Z"},"body":"books3 is part of EleutherAI\/The Pile, but AFAIK, The Pile dataset blend all sub datasets together thus we are not able to use just one of its sub dataset from The Pile data. So I create an independent dataset using The Pile preliminary components.\r\n\r\nWhen I was creating dataset card. I found there is room for creating \/ editing dataset card. I've made it an issue. #2797 \r\n\r\nAlso I am wondering whether the import of The Pile dataset is actively undertaken (because I may need it recently)? 
#1675","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2801\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2801\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2800","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2800\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2800\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2800\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2800","id":970819988,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEyNzExNTcx","number":2800,"title":"Support streaming tar files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-14T04:40:17Z","updated_at":"2021-08-26T10:02:30Z","closed_at":"2021-08-14T04:55:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2800","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2800","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2800.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2800.patch","merged_at":"2021-08-14T04:55:57Z"},"body":"This PR adds support to stream tar files by using the `fsspec` tar protocol.\r\n\r\nIt also uses the custom `readline` implemented in PR #2786.\r\n\r\nThe corresponding test is implemented in PR #2786.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2800\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2800\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2799","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2799\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2799\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2799\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2799","id":970507351,"node_id":"MDU6SXNzdWU5NzA1MDczNTE=","number":2799,"title":"Loading JSON throws ArrowNotImplementedError","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2021-08-13T15:31:48Z","updated_at":"2022-01-10T18:59:32Z","closed_at":"2022-01-10T18:59:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI have created a [dataset](https:\/\/huggingface.co\/datasets\/lewtun\/github-issues-test) of GitHub issues in line-separated JSON format and am finding that I cannot load it with the `json` loading script (see stack trace below).\r\n\r\nCuriously, there is no problem loading the dataset with `pandas` which suggests some incorrect type inference is being made on the `datasets` side. 
For example, the stack trace indicates that some URL fields are being parsed as timestamps.\r\n\r\nYou can find a Colab notebook which reproduces the error [here](https:\/\/colab.research.google.com\/drive\/1YUCM0j1vx5ZrouQbYSzal6RwB4-Aoh4o?usp=sharing).\r\n\r\n**Edit:** If one repeatedly tries to load the dataset, it _eventually_ works but I think it would still be good to understand why it fails in the first place :)\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nfrom huggingface_hub import hf_hub_url\r\nimport pandas as pd\r\n\r\n# returns https:\/\/huggingface.co\/datasets\/lewtun\/github-issues-test\/resolve\/main\/issues-datasets.jsonl\r\ndata_files = hf_hub_url(repo_id=\"lewtun\/github-issues-test\", filename=\"issues-datasets.jsonl\", repo_type=\"dataset\")\r\n# throws ArrowNotImplementedError\r\ndset = load_dataset(\"json\", data_files=data_files, split=\"test\")\r\n# no problem with pandas ...\r\ndf = pd.read_json(data_files, orient=\"records\", lines=True)\r\ndf.head()\r\n```\r\n\r\n## Expected results\r\nI can load any line-separated JSON file, similar to `pandas`.\r\n\r\n## Actual results\r\n```\r\n---------------------------------------------------------------------------\r\nArrowNotImplementedError Traceback (most recent call last)\r\n in ()\r\n----> 1 dset = load_dataset(\"json\", data_files=data_files, split=\"test\")\r\n\r\n9 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowNotImplementedError: JSON conversion to struct, open_issues: int64, closed_issues: int64, state: timestamp[s], created_at: timestamp[s], updated_at: timestamp[s], due_on: timestamp[s], closed_at: timestamp[s]> is not supported\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: Linux-5.4.104+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.11\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2799\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2799\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2798","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2798\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2798\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2798\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2798","id":970493126,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEyNDM3ODc2","number":2798,"title":"Fix streaming zip 
files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-13T15:17:01Z","updated_at":"2021-08-16T14:16:50Z","closed_at":"2021-08-13T15:38:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2798","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2798","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2798.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2798.patch","merged_at":"2021-08-13T15:38:28Z"},"body":"Currently, streaming remote zip data files gives `FileNotFoundError` message:\r\n```python\r\ndata_files = f\"https:\/\/huggingface.co\/datasets\/albertvillanova\/datasets-tests-compression\/resolve\/main\/sample.zip\"\r\nds = load_dataset(\"json\", split=\"train\", data_files=data_files, streaming=True)\r\nnext(iter(ds))\r\n```\r\n\r\nThis PR fixes it by adding a glob string.\r\n\r\nThe corresponding test is implemented in PR #2786.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2798\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2798\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2797","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2797\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2797\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2797\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2797","id":970331634,"node_id":"MDU6SXNzdWU5NzAzMzE2MzQ=","number":2797,"title":"Make creating\/editing dataset cards easier, by editing on site and dumping info from test 
command.","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-13T11:54:49Z","updated_at":"2021-08-14T08:42:09Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nCreating and editing dataset cards should be but not that easy\r\n- If other else know Some information I don't know (bias of dataset, dataset curation, supported dataset, ...), he\/she should know the description on hf.co comes from README.md under github huggingface\/datasets\/datasets\/the dataset, and willing to make a pr to add or fix information.\r\n- Many information is also saved in `dataset_info.json` (citaion, description), but still need to write it down to README.md again.\r\n- Contributor need to pip install and start a local server just for tagging the dataset's size. And contributor may be creating the dataset on lab's server, which can't open a browser. \r\n- if any one proposes a new tag, it doesn't show in the list that another creator see. (a stackoverflow way may be ideal)\r\n- dataset card generator web app doesn't generate the necessary subsecion `Contributions` for us.\r\n\r\n**Describe the solution you'd like**\r\n- Everyone (or at least the author\/contributor) can edit the description, information, tags of the dataset, on hf.co website. Just like wikipedia+stackoverflow\r\n- We can infer the actual data size, citation, data instance, ... 
from `dataset_info.json` and `dataset.arrow` via `dataset-cli test`\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2797\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2797\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2796","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2796\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2796\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2796\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2796","id":970235846,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEyMjE1ODM2","number":2796,"title":"add cedr dataset","user":{"login":"naumov-al","id":22640075,"node_id":"MDQ6VXNlcjIyNjQwMDc1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22640075?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/naumov-al","html_url":"https:\/\/github.com\/naumov-al","followers_url":"https:\/\/api.github.com\/users\/naumov-al\/followers","following_url":"https:\/\/api.github.com\/users\/naumov-al\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/naumov-al\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/naumov-al\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/naumov-al\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/naumov-al\/orgs","repos_url":"https:\/\/api.github.com\/users\/naumov-al\/repos","events_url":"https:\/\/api.github.com\/users\/naumov-al\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/naumov-al\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-13T09:37:35Z","updated_at":"2021-08-27T16:01:36Z","closed_at":"2021-08-27T16:01:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2796","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2796","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2796.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2796.patch","merged_at":"2021-08-27T16:01:35Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2796\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2796\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2794","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2794\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2794\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2794\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2794","id":969728545,"node_id":"MDU6SXNzdWU5Njk3Mjg1NDU=","number":2794,"title":"Warnings and documentation about pickling incorrect","user":{"login":"mbforbes","id":1170062,"node_id":"MDQ6VXNlcjExNzAwNjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1170062?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mbforbes","html_url":"https:\/\/github.com\/mbforbes","followers_url":"https:\/\/api.github.com\/users\/mbforbes\/followers","following_url":"https:\/\/api.github.com\/users\/mbforbes\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mbforbes\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mbforbes\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mbforbes\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mbforbes\/orgs","repos_url":"https:\/\/api.github.com\/users\/mbforbes\/repos","events_url":"https:\/\/api.github.com\/users\/mbforbes\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mbforbes\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-12T23:09:13Z","updated_at":"2021-08-12T23:09:31Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nI have a docs bug and a closely related docs enhancement suggestion!\r\n\r\n### Bug\r\n\r\nThe warning and documentation say \"either `dill` or `pickle`\" for fingerprinting. But it seems that `dill`, which is installed by `datasets` by default, _must_ work, or else the fingerprinting fails.\r\n\r\nWarning:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/450b9174765374111e5c6daab0ed294bc3d9b639\/src\/datasets\/fingerprint.py#L262\r\n\r\nDocs:\r\n\r\n> For a transform to be hashable, it needs to be pickleable using dill or pickle.\r\n> \u2013 [docs](https:\/\/huggingface.co\/docs\/datasets\/processing.html#fingerprinting)\r\n\r\nFor my code, `pickle` works, but `dill` fails. The `dill` failure has already been reported in https:\/\/github.com\/huggingface\/datasets\/issues\/2643. However, the `dill` failure causes a hashing failure in the datasets library, without any backing off to `pickle`. This implies that it's not the case that either `dill` **or** `pickle` can work, but that `dill` must work if it is installed. I think this is more accurate wording, since it is installed and used by default:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/c93525dc291346e54212567fa72d7d607befe937\/setup.py#L83\r\n\r\n... and the hashing will fail if it fails.\r\n\r\n### Enhancement\r\n\r\nI think it'd be very helpful to add to the documentation how to debug hashing failures. 
It took me a while to figure out how to diagnose this. There is a very nice two-liner by @lhoestq in https:\/\/github.com\/huggingface\/datasets\/issues\/2516#issuecomment-865173139:\r\n\r\n```python\r\nfrom datasets.fingerprint import Hasher\r\nHasher.hash(my_object)\r\n```\r\n\r\nI think add this to the docs will help future users quickly debug any hashing troubles of their own :-)\r\n\r\n## Steps to reproduce the bug\r\n\r\n`dill` but not `pickle` hashing failure in https:\/\/github.com\/huggingface\/datasets\/issues\/2643\r\n\r\n## Expected results\r\nIf either `dill` or `pickle` can successfully hash, the hashing will succeed.\r\n\r\n## Actual results\r\nIf `dill` or `pickle` cannot hash, the hashing fails.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-5.8.0-1038-gcp-x86_64-with-glibc2.31\r\n- Python version: 3.9.6\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2794\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2794\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2793","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2793\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2793\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2793\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2793","id":968967773,"node_id":"MDExOlB1bGxSZXF1ZXN0NzExMDQ4NDY2","number":2793,"title":"Fix type hint for data_files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-12T14:42:37Z","updated_at":"2021-08-12T15:35:29Z","closed_at":"2021-08-12T15:35:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2793","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2793","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2793.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2793.patch","merged_at":"2021-08-12T15:35:29Z"},"body":"Fix type 
hint for `data_files` in signatures and docstrings.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2793\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2793\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2792","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2792\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2792\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2792\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2792","id":968650274,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEwNzUyMjc0","number":2792,"title":"Update: GooAQ - add train\/val\/test splits","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-12T11:40:18Z","updated_at":"2021-08-27T15:58:45Z","closed_at":"2021-08-27T15:58:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2792","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2792","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2792.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2792.patch","merged_at":"2021-08-27T15:58:14Z"},"body":"[GooAQ](https:\/\/github.com\/allenai\/gooaq) dataset was recently updated after splits were added for the same. 
This PR contains new updated GooAQ with train\/val\/test splits and updated README as well.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2792\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2792\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2791","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2791\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2791\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2791\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2791","id":968360314,"node_id":"MDExOlB1bGxSZXF1ZXN0NzEwNDgxNDAy","number":2791,"title":"Fix typo in cnn_dailymail","user":{"login":"omaralsayed","id":42531544,"node_id":"MDQ6VXNlcjQyNTMxNTQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42531544?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/omaralsayed","html_url":"https:\/\/github.com\/omaralsayed","followers_url":"https:\/\/api.github.com\/users\/omaralsayed\/followers","following_url":"https:\/\/api.github.com\/users\/omaralsayed\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/omaralsayed\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/omaralsayed\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/omaralsayed\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/omaralsayed\/orgs","repos_url":"https:\/\/api.github.com\/users\/omaralsayed\/repos","events_url":"https:\/\/api.github.com\/users\/omaralsayed\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/omaralsayed\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-12T08:38:42Z","updated_at":"2021-08-12T11:17:59Z","closed_at":"2021-08-12T11:17:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2791","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2791","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2791.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2791.patch","merged_at":"2021-08-12T11:17:59Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2791\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2791\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2790","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2790\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2790\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2790\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2790","id":967772181,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA5OTI3NjM2","number":2790,"title":"Fix typo in test_dataset_common","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-12T01:10:29Z","updated_at":"2021-08-12T11:31:29Z","closed_at":"2021-08-12T11:31:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2790","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2790","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2790.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2790.patch","merged_at":"2021-08-12T11:31:29Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2790\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2790\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789","id":967361934,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA5NTQwMzY5","number":2789,"title":"Updated dataset description of 
DaNE","user":{"login":"KennethEnevoldsen","id":23721977,"node_id":"MDQ6VXNlcjIzNzIxOTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23721977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KennethEnevoldsen","html_url":"https:\/\/github.com\/KennethEnevoldsen","followers_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/followers","following_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/repos","events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-11T19:58:48Z","updated_at":"2021-08-12T16:10:59Z","closed_at":"2021-08-12T16:06:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2789","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789.patch","merged_at":"2021-08-12T16:06:01Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2788","id":967149389,"node_id":"MDU6SXNzdWU5NjcxNDkzODk=","number":2788,"title":"How to sample every file in a list of files making up a split in a dataset when 
loading?","user":{"login":"brijow","id":11220949,"node_id":"MDQ6VXNlcjExMjIwOTQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11220949?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/brijow","html_url":"https:\/\/github.com\/brijow","followers_url":"https:\/\/api.github.com\/users\/brijow\/followers","following_url":"https:\/\/api.github.com\/users\/brijow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/brijow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/brijow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/brijow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/brijow\/orgs","repos_url":"https:\/\/api.github.com\/users\/brijow\/repos","events_url":"https:\/\/api.github.com\/users\/brijow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/brijow\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-11T17:43:21Z","updated_at":"2021-08-23T17:12:22Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am loading a dataset with multiple train, test, and validation files like this:\r\n\r\n```\r\ndata_files_dict = {\r\n \"train\": [train_file1, train_file2],\r\n \"test\": [test_file1, test_file2],\r\n \"val\": [val_file1, val_file2]\r\n}\r\ndataset = datasets.load_dataset(\r\n \"csv\",\r\n data_files=data_files_dict,\r\n split=['train[:8]', 'test[:8]', 'val[:8]']\r\n)\r\n\r\n```\r\n\r\nHowever, this only selects the first 8 rows from train_file1, test_file1, val_file1, since they are the first files in the lists.\r\n\r\nI'm trying to formulate a split argument that can sample from each file specified in my list of files that make up each split.\r\n\r\nIs this type of splitting supported? 
If so, how can I do it?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2787","id":967018406,"node_id":"MDU6SXNzdWU5NjcwMTg0MDY=","number":2787,"title":"ConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com","user":{"login":"jinec","id":39627475,"node_id":"MDQ6VXNlcjM5NjI3NDc1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39627475?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jinec","html_url":"https:\/\/github.com\/jinec","followers_url":"https:\/\/api.github.com\/users\/jinec\/followers","following_url":"https:\/\/api.github.com\/users\/jinec\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jinec\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jinec\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jinec\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jinec\/orgs","repos_url":"https:\/\/api.github.com\/users\/jinec\/repos","events_url":"https:\/\/api.github.com\/users\/jinec\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jinec\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-08-11T16:19:01Z","updated_at":"2021-11-24T06:25:38Z","closed_at":"2021-08-18T15:09:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello,\r\nI am trying to run run_glue.py and it gives me this error -\r\n\r\nTraceback (most recent call last):\r\n File \"E:\/BERT\/pytorch_hugging\/transformers\/examples\/pytorch\/text-classification\/run_glue.py\", line 546, in \r\n main()\r\n File \"E:\/BERT\/pytorch_hugging\/transformers\/examples\/pytorch\/text-classification\/run_glue.py\", line 250, in main\r\n datasets = load_dataset(\"glue\", data_args.task_name, cache_dir=model_args.cache_dir)\r\n File \"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\load.py\", line 718, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\load.py\", line 320, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\utils\\file_utils.py\", line 291, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File 
\"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\utils\\file_utils.py\", line 623, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.7.0\/datasets\/glue\/glue.py\r\n\r\nTrying to do python run_glue.py --model_name_or_path\r\nbert-base-cased\r\n--task_name\r\nmrpc\r\n--do_train\r\n--do_eval\r\n--max_seq_length\r\n128\r\n--per_device_train_batch_size\r\n32\r\n--learning_rate\r\n2e-5\r\n--num_train_epochs\r\n3\r\n--output_dir\r\n.\/tmp\/mrpc\/\r\n\r\nIs this something on my end? From what I can tell, this was re-fixeded by @fullyz a few months ago.\r\nThank you!\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786","id":966282934,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA4NTQwMzU0","number":2786,"title":"Support streaming compressed files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-11T09:02:06Z","updated_at":"2021-08-17T05:28:39Z","closed_at":"2021-08-16T06:36:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2786","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786.patch","merged_at":"2021-08-16T06:36:19Z"},"body":"Add support to stream compressed files (current options in fsspec):\r\n- bz2\r\n- lz4\r\n- xz\r\n- zstd\r\n\r\ncc: @lewtun 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783","id":965461382,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3NzcxOTM3","number":2783,"title":"Add KS task to SUPERB","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-08-10T22:14:07Z","updated_at":"2021-08-12T16:45:01Z","closed_at":"2021-08-11T20:19:17Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2783","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783.patch","merged_at":"2021-08-11T20:19:17Z"},"body":"Add the KS (keyword spotting) task as described in the [SUPERB paper](https:\/\/arxiv.org\/abs\/2105.01051).\r\n\r\n- [s3prl instructions](https:\/\/github.com\/s3prl\/s3prl\/blob\/master\/s3prl\/downstream\/README.md#ks-keyword-spotting)\r\n- [s3prl implementation](https:\/\/github.com\/s3prl\/s3prl\/blob\/master\/s3prl\/downstream\/speech_commands\/dataset.py)\r\n- [TFDS implementation](https:\/\/github.com\/tensorflow\/datasets\/blob\/master\/tensorflow_datasets\/audio\/speech_commands.py)\r\n\r\nSome notable quirks:\r\n- The dataset is originally single-archive (train+val+test all in one), but the test set has a \"canonical\" distribution in a separate archive, which is also used here (see `_split_ks_files()`). \r\n- The `_background_noise_`\/`_silence_` audio files are much longer than others, so they require some sort of slicing for downstream training. 
I decided to leave the implementation of that up to the users, since TFDS and s3prl take different approaches (either slicing wavs deterministically, or subsampling randomly at runtime)\r\n\r\nRelated to #2619.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":3,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782","id":964858439,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MjQ5NDE5","number":2782,"title":"Fix renaming of corpus_bleu args","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T11:02:34Z","updated_at":"2021-08-10T11:16:07Z","closed_at":"2021-08-10T11:16:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2782","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782.patch","merged_at":"2021-08-10T11:16:07Z"},"body":"Last `sacrebleu` release (v2.0.0) has renamed `sacrebleu.corpus_bleu` args from `(sys_stream, ref_streams)` to `(hipotheses, references)`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR passes the args without parameter names, so that it is valid for all versions of `sacrebleu`.\r\n\r\nThis is a partial hotfix of #2781.\r\n\r\nClose 
#2781.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2781","id":964805351,"node_id":"MDU6SXNzdWU5NjQ4MDUzNTE=","number":2781,"title":"Latest v2.0.0 release of sacrebleu has broken some metrics","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-08-10T09:59:41Z","updated_at":"2021-08-10T11:16:07Z","closed_at":"2021-08-10T11:16:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAfter `sacrebleu` v2.0.0 release (see changes here: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15), some of `datasets` metrics are broken:\r\n- Default tokenizer `sacrebleu.DEFAULT_TOKENIZER` no longer exists:\r\n - #2739\r\n - #2778\r\n- Bleu tokenizers are no longer accessible with `sacrebleu.TOKENIZERS`:\r\n - #2779\r\n- `corpus_bleu` args have been renamed from `(sys_stream, ref_streams)` to `(hipotheses, references)`: \r\n - #2782 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780","id":964794764,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTk2NjA3","number":2780,"title":"VIVOS dataset for Vietnamese ASR","user":{"login":"binh234","id":57580923,"node_id":"MDQ6VXNlcjU3NTgwOTIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57580923?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/binh234","html_url":"https:\/\/github.com\/binh234","followers_url":"https:\/\/api.github.com\/users\/binh234\/followers","following_url":"https:\/\/api.github.com\/users\/binh234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/binh234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/binh234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/binh234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/binh234\/orgs","repos_url":"https:\/\/api.github.com\/users\/binh234\/repos","events_url":"https:\/\/api.github.com\/users\/binh234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/binh234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T09:47:36Z","updated_at":"2021-08-12T11:09:30Z","closed_at":"2021-08-12T11:09:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2780","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780.patch","merged_at":"2021-08-12T11:09:30Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779","id":964775085,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTgwNTgw","number":2779,"title":"Fix sacrebleu 
tokenizers","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T09:24:27Z","updated_at":"2021-08-10T11:03:08Z","closed_at":"2021-08-10T10:57:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2779","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779.patch","merged_at":"2021-08-10T10:57:54Z"},"body":"Last `sacrebleu` release (v2.0.0) has removed `sacrebleu.TOKENIZERS`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR makes a hot fix of the bug by using a private function in `sacrebleu`: `sacrebleu.metrics.bleu._get_tokenizer()`.\r\n\r\nEventually, this should be further fixed in order to use only public functions.\r\n\r\nThis is a partial hotfix of #2781.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778","id":964737422,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTQ5MTk2","number":2778,"title":"Do not pass tokenize to 
sacrebleu","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T08:40:37Z","updated_at":"2021-08-10T10:03:37Z","closed_at":"2021-08-10T10:03:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2778","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778.patch","merged_at":"2021-08-10T10:03:37Z"},"body":"Last `sacrebleu` release (v2.0.0) has removed `sacrebleu.DEFAULT_TOKENIZER`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR does not pass `tokenize` to `sacrebleu` (note that the user cannot pass it anyway) and `sacrebleu` will use its default, no matter where it is and how it is called.\r\n\r\nRelated to #2739.\r\n\r\nThis is a partial hotfix of #2781.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777","id":964696380,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTEzNzg3","number":2777,"title":"Use packaging to handle 
versions","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T07:51:39Z","updated_at":"2021-08-18T13:56:27Z","closed_at":"2021-08-18T13:56:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2777","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777.patch","merged_at":"2021-08-18T13:56:27Z"},"body":"Use packaging module to handle\/validate\/check versions of Python packages.\r\n\r\nRelated to #2769.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2776","id":964400596,"node_id":"MDU6SXNzdWU5NjQ0MDA1OTY=","number":2776,"title":"document `config.HF_DATASETS_OFFLINE` and 
precedence","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-09T21:23:17Z","updated_at":"2021-08-09T21:23:17Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"https:\/\/github.com\/huggingface\/datasets\/pull\/1976 implemented `HF_DATASETS_OFFLINE`, but:\r\n1. `config.HF_DATASETS_OFFLINE` is not documented\r\n2. the precedence is not documented (env, config)\r\n\r\nI'm thinking it probably should be similar to what it says https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#from-the-huggingface-hub about `datasets.config.IN_MEMORY_MAX_SIZE`:\r\n\r\nQuote:\r\n> The default in \ud83e\udd17 Datasets is to memory-map the dataset on disk unless you set datasets.config.IN_MEMORY_MAX_SIZE different from 0 bytes (default). In that case, the dataset will be copied in-memory if its size is smaller than datasets.config.IN_MEMORY_MAX_SIZE bytes, and memory-mapped otherwise. 
This behavior can be enabled by setting either the configuration option datasets.config.IN_MEMORY_MAX_SIZE (higher precedence) or the environment variable HF_DATASETS_IN_MEMORY_MAX_SIZE (lower precedence) to nonzero.\r\n\r\nContext: trying to use `config.HF_DATASETS_OFFLINE` here:\r\nhttps:\/\/github.com\/bigscience-workshop\/Megatron-DeepSpeed\/pull\/48\r\nbut are uncertain if it's safe, since it's not documented as a public API.\r\n\r\nThank you!\r\n\r\n@lhoestq, @albertvillanova ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2775","id":964303626,"node_id":"MDU6SXNzdWU5NjQzMDM2MjY=","number":2775,"title":"`generate_random_fingerprint()` deterministic with \ud83e\udd17Transformers' `set_seed()`","user":{"login":"mbforbes","id":1170062,"node_id":"MDQ6VXNlcjExNzAwNjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1170062?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mbforbes","html_url":"https:\/\/github.com\/mbforbes","followers_url":"https:\/\/api.github.com\/users\/mbforbes\/followers","following_url":"https:\/\/api.github.com\/users\/mbforbes\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mbforbes\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mbforbes\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mbforbes\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mbforbes\/orgs","repos_url":"https:\/\/api.github.com\/users\/mbforbes\/repos","events_url":"https:\/\/api.github.com\/users\/mbforbes\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mbforbes\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-08-09T19:28:51Z","updated_at":"2021-08-26T08:30:54Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\n**Update:** I dug into this to try to reproduce the underlying issue, and I believe it's that `set_seed()` from the `transformers` library makes the \"random\" fingerprint identical each time. I believe this is still a bug, because `datasets` is used exactly this way in `transformers` after `set_seed()` has been called, and I think that using `set_seed()` is a standard procedure to aid reproducibility. I've added more details to reproduce this below.\r\n\r\nHi there! I'm using my own local dataset and custom preprocessing function. 
My preprocessing function seems to be unpickle-able, perhaps because it is from a closure (will debug this separately). I get this warning, which is expected:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/450b9174765374111e5c6daab0ed294bc3d9b639\/src\/datasets\/fingerprint.py#L260-L265\r\n\r\nHowever, what's not expected is that the `datasets` actually _does_ seem to cache and reuse this dataset between runs! After that line, the next thing that's logged looks like:\r\n\r\n```text\r\n Loading cached processed dataset at \/home\/xxx\/.cache\/huggingface\/datasets\/csv\/default-xxx\/0.0.0\/xxx\/cache-xxx.arrow\r\n```\r\n\r\nThe path is exactly the same each run (e.g., last 26 runs).\r\n\r\nThis becomes a problem because I'll pass in the `--max_eval_samples` flag to the HuggingFace example script I'm running off of ([run_swag.py](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/pytorch\/multiple-choice\/run_swag.py)). The fact that the cached dataset is reused means this flag gets ignored. I'll try to load 100 examples, and it will load the full cached 1,000,000.\r\n\r\nI think that\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/450b9174765374111e5c6daab0ed294bc3d9b639\/src\/datasets\/fingerprint.py#L248\r\n\r\n... is actually consistent because randomness is being controlled in HuggingFace\/Transformers for reproducibility. I've added a demo of this below.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\n# Contents of print_fingerprint.py\r\nfrom transformers import set_seed\r\nfrom datasets.fingerprint import generate_random_fingerprint\r\nset_seed(42)\r\nprint(generate_random_fingerprint())\r\n```\r\n\r\n```bash\r\nfor i in {0..10}; do\r\n python print_fingerprint.py\r\ndone\r\n\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n```\r\n\r\n## Expected results\r\nAfter the \"random hash\" warning is emitted, a random hash is generated, and no outdated cached datasets are reused.\r\n\r\n## Actual results\r\nAfter the \"random hash\" warning is emitted, an identical hash is generated each time, and an outdated cached dataset is reused each run.\r\n\r\n## Environment info\r\n\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-5.8.0-1038-gcp-x86_64-with-glibc2.31\r\n- Python version: 3.9.6\r\n- PyArrow version: 4.0.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774","id":963932199,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA2NDY2MDc0","number":2774,"title":"Prevent .map from using multiprocessing when loading from 
cache","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-08-09T12:11:38Z","updated_at":"2021-09-09T10:20:28Z","closed_at":"2021-09-09T10:20:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2774","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774.patch","merged_at":"2021-09-09T10:20:28Z"},"body":"## Context\r\n\r\nOn our setup, we use different setup to train vs proprocessing datasets. Usually we are able to obtain a high number of cpus to preprocess, which allows us to use `num_proc` however we can't use as many during training phase. Currently if we use `num_proc={whatever the preprocessing value was}` we load from cache, but we get:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/pool.py\", line 131, in worker\r\n put((job, i, result))\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/queues.py\", line 371, in put\r\n self._writer.send_bytes(obj)\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/connection.py\", line 203, in send_bytes\r\n self._send_bytes(m[offset:offset + size])\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/connection.py\", line 414, in _send_bytes\r\n self._send(header + buf)\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/connection.py\", line 371, in _send\r\n n = write(self._handle, buf)\r\nBrokenPipeError: [Errno 32] Broken pipe\r\n```\r\n\r\nOur current guess, is that we're spawning too many processes compared to the number of cpus available, and it's running OOM. Also we're loading this in DDP setting which means that for each gpu, I need to spawn a high number of processes to match the preprocessing fingerprint.\r\n\r\nInstead what we suggest:\r\n - Allow loading shard sequentially, sharing the same fingerprint as the multiprocessed one, in order to leverage multiprocessing when we actually generate the cache, and remove it when loading from cache.\r\n\r\n## Current issues\r\n\r\n~I'm having a hard time making fingerprints match. 
For some reason, the multiprocessing and the sequential version generate two different hash.~\r\n\r\n**EDIT**: Turns out multiprocessing and sequential have different `transform` value for fingerprinting (check `fingerprint_transform`) when running `_map_single`:\r\n - sequential : `datasets.arrow_dataset.Dataset._map_single`\r\n - multiprocessing: `datasets.arrow_dataset._map_single`\r\n \r\n This discrepancy is caused by multiprocessing pickling the transformer function, it doesn't seem to keep the `Dataset` hierarchy. I'm still unclear on why `func.__qual_name__` isn't handled correctly in multiprocessing. But replacing `__qualname__` by `__name__` fixes the issue.\r\n\r\n## What was done\r\n\r\n~We try to prevent the usage of multiprocessing when loading a dataset. Instead we load all cached shards sequentially.~\r\n\r\nI couldn't find a nice way to obtain the cached_file_name and check they all exist before deciding to use the multiprocessing flow or not. Instead I expose an optional boolean `sequential` in `map` method.\r\n\r\n## TODO\r\n - [x] Check that the multiprocessed version and the sequential version output the same output\r\n - [x] Check that sequential can load multiprocessed\r\n - [x] Check that multiprocessed can load sequential\r\n \r\n ## Test\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom multiprocessing import Pool\r\nimport random\r\n\r\ndef process(batch, rng):\r\n length = len(batch[\"text\"])\r\n return {**batch, \"processed_text\": [f\"PROCESSED {rng.random()}\" for _ in range(length)]}\r\n\r\ndataset = load_dataset(\"stas\/openwebtext-10k\", split=\"train\")\r\nprint(dataset.column_names)\r\nprint(type(dataset))\r\n\r\nrng = random.Random(42)\r\ndataset1 = dataset.map(process, batched=True, batch_size=50, num_proc=4, fn_kwargs={\"rng\": rng})\r\n\r\n# This one should be loaded from cache\r\nrng = random.Random(42)\r\ndataset2 = dataset.map(process, batched=True, batch_size=50, num_proc=4, fn_kwargs={\"rng\": rng}, sequential=True)\r\n\r\n# Just to check that the random generator was correct\r\nprint(dataset1[-1][\"processed_text\"])\r\nprint(dataset2[-1][\"processed_text\"])\r\n```\r\n \r\n ## Other solutions\r\n\r\nI chose to load everything sequentially, but we can probably find a way to load shards in parallel using another number of workers (essentially this would be an argument not used for fingerprinting, allowing to allow `m` shards using `n` processes, which would be very useful when same dataset have to be loaded on two different setup, and we still want to leverage cache).\r\n\r\nAlso we can use a env variable similarly to `TOKENIZERS_PARALLELISM` as this seems generally setup related (though this changes slightly if we use multiprocessing).\r\ncc @lhoestq (since I had asked you previously on `num_proc` being used for fingerprinting). 
Don't know if this is acceptable.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2773","id":963730497,"node_id":"MDU6SXNzdWU5NjM3MzA0OTc=","number":2773,"title":"Remove dataset_infos.json","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-09T07:43:19Z","updated_at":"2021-08-09T07:43:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nAs discussed, there are infos in the `dataset_infos.json` which are redundant and we could have them only in the README file.\r\n\r\nOthers could be migrated to the README, like: \"dataset_size\", \"size_in_bytes\", \"download_size\", \"splits.split_name.[num_bytes, num_examples]\",...\r\n\r\nHowever, there are others that do not seem too meaningful in the README, like the checksums.\r\n\r\n**Describe the solution you'd like**\r\nOpen a discussion to decide what to do with the `dataset_infos.json` files: which information to be migrated and\/or which information to be kept.\r\n\r\ncc: @julien-c @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2772","id":963348834,"node_id":"MDU6SXNzdWU5NjMzNDg4MzQ=","number":2772,"title":"Remove returned feature constrain","user":{"login":"PosoSAgapo","id":33200481,"node_id":"MDQ6VXNlcjMzMjAwNDgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33200481?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PosoSAgapo","html_url":"https:\/\/github.com\/PosoSAgapo","followers_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/followers","following_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/repos","events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-08T04:01:30Z","updated_at":"2021-08-08T08:48:01Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In the current version, the returned value of the map function has to be list or ndarray. However, this makes it unsuitable for many tasks. In NLP, many features are sparse like verb words, noun chunks, if we want to assign different values to different words, which will result in a large sparse matrix if we only score useful words like verb words. 
\r\n\r\nMostly, when using it on large scale, saving it as a whole takes a lot of disk storage and making it hard to read, the normal method is saving it in sparse form. However, the NumPy does not support sparse, therefore I have to use PyTorch or scipy to transform a matrix into special sparse form, which is not a form that can be transformed into list or ndarry. This violates the feature constraints of the map function. \r\n\r\nI do appreciate the convenience of Datasets package, but I do not think the compulsory datatype constrain is necessary, in some cases, we just cannot transform it into a list or ndarray due to some reasons. Any way to fix this? Or what I can do to disable the compulsory datatype constrain?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771","id":963257036,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1OTExMDMw","number":2771,"title":"[WIP][Common Voice 7] Add common voice 7.0","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-07T16:01:10Z","updated_at":"2021-12-06T23:24:02Z","closed_at":"2021-12-06T23:24:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2771","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771.patch","merged_at":null},"body":"This PR allows to load the new common voice dataset manually as explained when doing: \r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = 
load_dataset(\".\/datasets\/datasets\/common_voice_7\", \"ab\")\r\n```\r\n\r\n=>\r\n\r\n```\r\n Please follow the manual download instructions:\r\n\r\n You need to manually the dataset from `https:\/\/commonvoice.mozilla.org\/en\/datasets`.\r\n Make sure you choose the version `Common Voice Corpus 7.0`.\r\n Choose a language of your choice and find the corresponding language-id, *e.g.*, `Abkhaz` with language-id `ab`. The following language-ids are available:\r\n\r\n ['ab', 'ar', 'as', 'az', 'ba', 'bas', 'be', 'bg', 'br', 'ca', 'cnh', 'cs', 'cv', 'cy', 'de', 'dv', 'el', 'en', 'eo', 'es', 'et', 'eu', 'fa', 'fi', 'fr', 'fy-NL', 'ga-IE', 'gl', 'gn', 'ha', 'hi', 'hsb', 'hu', 'hy-AM', 'ia', 'id', 'it', 'ja', 'ka', 'kab', 'kk', 'kmr', 'ky', 'lg', 'lt', 'lv', 'mn', 'mt', 'nl', 'or', 'pa-IN', 'pl', 'pt', 'rm-sursilv', 'rm-vallader', 'ro', 'ru', 'rw', 'sah', 'sk', 'sl', 'sr', 'sv-SE', 'ta', 'th', 'tr', 'tt', 'ug', 'uk', 'ur', 'uz', 'vi', 'vot', 'zh-CN', 'zh-HK', 'zh-TW']\r\n\r\n Next, you will have to enter your email address to download the dataset in the `tar.gz` format. Save the file under .\r\n The file should then be extracted with: ``tar -xvzf `` which will extract a folder called ``cv-corpus-7.0-2021-07-21``.\r\n The dataset can then be loaded with `datasets.load_dataset(\"common_voice\", , data_dir=\"\", ignore_verifications=True).\r\n```\r\n\r\nHaving followed those instructions one can then download the data as follows: \r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\".\/datasets\/datasets\/common_voice_7\", \"ab\", data_dir=\".\/cv-corpus-7.0-2021-07-21\/\", ignore_verifications=True)\r\n```\r\n\r\n## TODO\r\n- [ ] Discuss naming. Is the name ok here \"common_voice_7\"? The dataset script differs only really in one point from `common_voice.py` in that all the metadata is different (more hours etc...) and that it has to use manual data dir for now\r\n- [ ] Ideally we should get a bundled download link. For `common_voice.py` there is a bundled download link: `https:\/\/voice-prod-bundler-ee1969a6ce8178826482b88e843c335139bd3fb4.s3.amazonaws.com\/cv-corpus-6.1-2020-12-11\/{}.tar.gz` that allows one to directly download the data. However such a link is missing for Common Voice 7. I guess we should try to contact common voice about it and ask whether we could host the data or help otherwise somehow. See: https:\/\/github.com\/common-voice\/common-voice-bundler\/issues\/15 cc @yjernite \r\n- [ ] I did not compute the dataset.json and it would mean that I'd have to download 76 datasets totalling around 1TB manually before running the checksum command. This just takes too much time. For now the user will have to add a `ignore_verifications=True` to download the data. 
This step would also be much easier if we could get a bundled link\r\n- [ ] Add dummy data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770","id":963246512,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1OTAzMzIy","number":2770,"title":"Add support for fast tokenizer in BertScore","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-07T15:00:03Z","updated_at":"2021-08-09T12:34:43Z","closed_at":"2021-08-09T11:16:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2770","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770.patch","merged_at":"2021-08-09T11:16:25Z"},"body":"This PR adds support for a fast tokenizer in BertScore, which has been added recently to the lib.\r\nFixes #2765 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769","id":963240802,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1ODk5MTYy","number":2769,"title":"Allow PyArrow from source","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-07T14:26:44Z","updated_at":"2021-08-09T15:38:39Z","closed_at":"2021-08-09T15:38:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2769","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769.patch","merged_at":"2021-08-09T15:38:39Z"},"body":"When installing pyarrow from source the version is:\r\n\r\n```python\r\n>>> import pyarrow; pyarrow.__version__\r\n'2.1.0.dev612'\r\n```\r\n\r\n-> however this breaks the install check at init of `datasets`. 
This PR makes sure that everything coming after the last `'.'` is removed.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2768","id":963229173,"node_id":"MDU6SXNzdWU5NjMyMjkxNzM=","number":2768,"title":"`ArrowInvalid: Added column's length must match table's length.` after using `select`","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-07T13:17:29Z","updated_at":"2021-08-09T11:26:43Z","closed_at":"2021-08-09T11:26:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI would like to add a column to a downsampled dataset. However I get an error message saying the length don't match with the length of the unsampled dataset indicated. I suspect that the dataset size is not updated when calling `select`.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\"tweets_hate_speech_detection\")['train'].select(range(128))\r\nds = ds.add_column('ones', [1]*128)\r\n```\r\n\r\n## Expected results\r\nI would expect a new column named `ones` filled with `1`. When I check the length of `ds` it says `128`. 
Interestingly, it works when calling `ds = ds.map(lambda x: x)` before adding the column.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n```python\r\n---------------------------------------------------------------------------\r\nArrowInvalid Traceback (most recent call last)\r\n\/var\/folders\/l4\/2905jygx4tx5jv8_kn03vxsw0000gn\/T\/ipykernel_6301\/868709636.py in \r\n 1 from datasets import load_dataset\r\n 2 ds = load_dataset(\"tweets_hate_speech_detection\")['train'].select(range(128))\r\n----> 3 ds = ds.add_column('ones', [0]*128)\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 183 }\r\n 184 # apply actual function\r\n--> 185 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 186 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 187 # re-apply format to the output\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 395 # Call actual function\r\n 396 \r\n--> 397 out = func(self, *args, **kwargs)\r\n 398 \r\n 399 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py in add_column(self, name, column, new_fingerprint)\r\n 2965 column_table = InMemoryTable.from_pydict({name: column})\r\n 2966 # Concatenate tables horizontally\r\n-> 2967 table = ConcatenationTable.from_tables([self._data, column_table], axis=1)\r\n 2968 # Update features\r\n 2969 info = self.info.copy()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in from_tables(cls, tables, axis)\r\n 715 table_blocks = to_blocks(table)\r\n 716 blocks = _extend_blocks(blocks, table_blocks, axis=axis)\r\n--> 717 return cls.from_blocks(blocks)\r\n 718 \r\n 719 @property\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in from_blocks(cls, blocks)\r\n 663 return cls(table, blocks)\r\n 664 else:\r\n--> 665 table = cls._concat_blocks_horizontally_and_vertically(blocks)\r\n 666 return cls(table, blocks)\r\n 667 \r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in _concat_blocks_horizontally_and_vertically(cls, blocks)\r\n 623 if not tables:\r\n 624 continue\r\n--> 625 pa_table_horizontally_concatenated = cls._concat_blocks(tables, axis=1)\r\n 626 pa_tables_to_concat_vertically.append(pa_table_horizontally_concatenated)\r\n 627 return cls._concat_blocks(pa_tables_to_concat_vertically, axis=0)\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in _concat_blocks(blocks, axis)\r\n 612 else:\r\n 613 for name, col in zip(table.column_names, table.columns):\r\n--> 614 pa_table = pa_table.append_column(name, col)\r\n 615 return pa_table\r\n 616 else:\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.append_column()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.add_column()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowInvalid: Added column's length must match table's length. 
Expected length 31962 but got length 128\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.11.0\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2767","id":963002120,"node_id":"MDU6SXNzdWU5NjMwMDIxMjA=","number":2767,"title":"equal operation to perform unbatch for huggingface datasets ","user":{"login":"dorooddorood606","id":79288051,"node_id":"MDQ6VXNlcjc5Mjg4MDUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79288051?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorooddorood606","html_url":"https:\/\/github.com\/dorooddorood606","followers_url":"https:\/\/api.github.com\/users\/dorooddorood606\/followers","following_url":"https:\/\/api.github.com\/users\/dorooddorood606\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorooddorood606\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorooddorood606\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorooddorood606\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorooddorood606\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorooddorood606\/repos","events_url":"https:\/\/api.github.com\/users\/dorooddorood606\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorooddorood606\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-08-06T19:45:52Z","updated_at":"2021-08-07T19:56:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI need to use \"unbatch\" operation in tensorflow on a huggingface dataset, I could not find this operation, could you kindly direct me how I can do it, here is the problem I am trying to solve:\r\n\r\nI am considering \"record\" dataset in SuperGlue and I need to replicate each entery of the dataset for each answer, to make it similar to what T5 originally did:\r\n\r\nhttps:\/\/github.com\/google-research\/text-to-text-transfer-transformer\/blob\/3c58859b8fe72c2dbca6a43bc775aa510ba7e706\/t5\/data\/preprocessors.py#L925\r\n\r\nHere please find an example:\r\n\r\n For example, a typical example from ReCoRD might look like\r\n {\r\n 'passsage': 'This is the passage.',\r\n 'query': 'A @placeholder is a bird.',\r\n 'entities': ['penguin', 'potato', 'pigeon'],\r\n 'answers': ['penguin', 
'pigeon'],\r\n }\r\n and I need a prosessor which would turn this example into the following two examples:\r\n {\r\n 'inputs': 'record query: A @placeholder is a bird. entities: penguin, '\r\n 'potato, pigeon passage: This is the passage.',\r\n 'targets': 'penguin',\r\n }\r\n and\r\n {\r\n 'inputs': 'record query: A @placeholder is a bird. entities: penguin, '\r\n 'potato, pigeon passage: This is the passage.',\r\n 'targets': 'pigeon',\r\n }\r\n\r\n\r\nFor doing this, one need unbatch, as each entry can map to multiple samples depending on the number of answers, I am not sure how to perform this operation with huggingface datasets library and greatly appreciate your help\r\n\r\n@lhoestq \r\n\r\nThank you very much.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766","id":962994198,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1NzAyNjM5","number":2766,"title":"fix typo (ShuffingConfig -> ShufflingConfig)","user":{"login":"daleevans","id":4944007,"node_id":"MDQ6VXNlcjQ5NDQwMDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4944007?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/daleevans","html_url":"https:\/\/github.com\/daleevans","followers_url":"https:\/\/api.github.com\/users\/daleevans\/followers","following_url":"https:\/\/api.github.com\/users\/daleevans\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/daleevans\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/daleevans\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/daleevans\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/daleevans\/orgs","repos_url":"https:\/\/api.github.com\/users\/daleevans\/repos","events_url":"https:\/\/api.github.com\/users\/daleevans\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/daleevans\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-06T19:31:40Z","updated_at":"2021-08-10T14:17:03Z","closed_at":"2021-08-10T14:17:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2766","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766.patch","merged_at":"2021-08-10T14:17:02Z"},"body":"pretty straightforward, it should be Shuffling instead of 
Shuffing","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2765","id":962861395,"node_id":"MDU6SXNzdWU5NjI4NjEzOTU=","number":2765,"title":"BERTScore Error","user":{"login":"gagan3012","id":49101362,"node_id":"MDQ6VXNlcjQ5MTAxMzYy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49101362?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gagan3012","html_url":"https:\/\/github.com\/gagan3012","followers_url":"https:\/\/api.github.com\/users\/gagan3012\/followers","following_url":"https:\/\/api.github.com\/users\/gagan3012\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gagan3012\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gagan3012\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gagan3012\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gagan3012\/orgs","repos_url":"https:\/\/api.github.com\/users\/gagan3012\/repos","events_url":"https:\/\/api.github.com\/users\/gagan3012\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gagan3012\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-06T15:58:57Z","updated_at":"2021-08-09T11:16:25Z","closed_at":"2021-08-09T11:16:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\npredictions = [\"hello there\", \"general kenobi\"]\r\nreferences = [\"hello there\", \"general kenobi\"]\r\nbert = load_metric('bertscore')\r\nbert.compute(predictions=predictions, references=references,lang='en')\r\n```\r\n\r\n# Bug\r\n`TypeError: get_hash() missing 1 required positional argument: 'use_fast_tokenizer'`\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform: Colab \r\n- Python version:\r\n- PyArrow version:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764","id":962554799,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1MzI3MDQ5","number":2764,"title":"Add DER metric for SUPERB speaker diarization task","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-06T09:12:36Z","updated_at":"2021-08-06T10:06:53Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2764","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2763","id":961895523,"node_id":"MDU6SXNzdWU5NjE4OTU1MjM=","number":2763,"title":"English wikipedia datasets is not 
clean","user":{"login":"lucadiliello","id":23355969,"node_id":"MDQ6VXNlcjIzMzU1OTY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23355969?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucadiliello","html_url":"https:\/\/github.com\/lucadiliello","followers_url":"https:\/\/api.github.com\/users\/lucadiliello\/followers","following_url":"https:\/\/api.github.com\/users\/lucadiliello\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucadiliello\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucadiliello\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucadiliello\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucadiliello\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucadiliello\/repos","events_url":"https:\/\/api.github.com\/users\/lucadiliello\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucadiliello\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-05T14:37:24Z","updated_at":"2021-08-23T17:00:16Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWikipedia english dumps contain many wikipedia paragraphs like \"References\", \"Category:\" and \"See Also\" that should not be used for training.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nfrom datasets import load_dataset\r\nw = load_dataset('wikipedia', '20200501.en')\r\nprint(w['train'][0]['text'])\r\n```\r\n\r\n> 'Yangliuqing () is a market town in Xiqing District, in the western suburbs of Tianjin, People\\'s Republic of China. Despite its relatively small size, it has been named since 2006 in the \"famous historical and cultural market towns in China\".\\n\\nIt is best known in China for creating nianhua or Yangliuqing nianhua. For more than 400 years, Yangliuqing has in effect specialised in the creation of these woodcuts for the New Year. wood block prints using vivid colourschemes to portray traditional scenes of children\\'s games often interwoven with auspiciouse objects.\\n\\n, it had 27 residential communities () and 25 villages under its administration.\\n\\nShi Family Grand Courtyard\\n\\nShi Family Grand Courtyard (Ti\u0101nj\u012bn Sh\u00ed Ji\u0101 D\u00e0 Yu\u00e0n, \u5929\u6d25\u77f3\u5bb6\u5927\u9662) is situated in Yangliuqing Town of Xiqing District, which is the former residence of wealthy merchant Shi Yuanshi - the 4th son of Shi Wancheng, one of the eight great masters in Tianjin. First built in 1875, it covers over 6,000 square meters, including large and small yards and over 200 folk houses, a theater and over 275 rooms that served as apartments and places of business and worship for this powerful family. Shifu Garden, which finished its expansion in October 2003, covers 1,200 square meters, incorporates the elegance of imperial garden and delicacy of south garden. Now the courtyard of Shi family covers about 10,000 square meters, which is called the first mansion in North China. 
Now it serves as the folk custom museum in Yangliuqing, which has a large collection of folk custom museum in Yanliuqing, which has a large collection of folk art pieces like Yanliuqing New Year pictures, brick sculpture.\\n\\nShi\\'s ancestor came from Dong\\'e County in Shandong Province, engaged in water transport of grain. As the wealth gradually accumulated, the Shi Family moved to Yangliuqing and bought large tracts of land and set up their residence. Shi Yuanshi came from the fourth generation of the family, who was a successful businessman and a good household manager, and the residence was thus enlarged for several times until it acquired the present scale. It is believed to be the first mansion in the west of Tianjin.\\n\\nThe residence is symmetric based on the axis formed by a passageway in the middle, on which there are four archways. On the east side of the courtyard, there are traditional single-story houses with rows of rooms around the four sides, which was once the living area for the Shi Family. The rooms on north side were the accountants\\' office. On the west are the major constructions including the family hall for worshipping Buddha, theater and the south reception room. On both sides of the residence are side yard rooms for maids and servants.\\n\\nToday, the Shi mansion, located in the township of Yangliuqing to the west of central Tianjin, stands as a surprisingly well-preserved monument to China\\'s pre-revolution mercantile spirit. It also serves as an on-location shoot for many of China\\'s popular historical dramas. Many of the rooms feature period furniture, paintings and calligraphy, and the extensive Shifu Garden.\\n\\nPart of the complex has been turned into the Yangliuqing Museum, which includes displays focused on symbolic aspects of the courtyards\\' construction, local folk art and customs, and traditional period furnishings and crafts.\\n\\n**See also \\n\\nList of township-level divisions of Tianjin\\n\\nReferences \\n\\n http:\/\/arts.cultural-china.com\/en\/65Arts4795.html\\n\\nCategory:Towns in Tianjin'**\r\n\r\n## Expected results\r\nI expect no junk in the data.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n- `datasets` version: 1.10.2\r\n- Platform: macOS-10.15.7-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2762","id":961652046,"node_id":"MDU6SXNzdWU5NjE2NTIwNDY=","number":2762,"title":"Add RVL-CDIP 
dataset","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-05T09:57:05Z","updated_at":"2021-12-08T12:05:46Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** RVL-CDIP\r\n- **Description:** The RVL-CDIP (Ryerson Vision Lab Complex Document Information Processing) dataset consists of 400,000 grayscale images in 16 classes, with 25,000 images per class. There are 320,000 training images, 40,000 validation images, and 40,000 test images. The images are sized so their largest dimension does not exceed 1000 pixels.\r\n- **Paper:** https:\/\/www.cs.cmu.edu\/~aharley\/icdar15\/\r\n- **Data:** https:\/\/www.cs.cmu.edu\/~aharley\/rvl-cdip\/\r\n- **Motivation:** I'm currently adding LayoutLMv2 and LayoutXLM to HuggingFace Transformers. LayoutLM (v1) already exists in the library. This dataset has a large value for document image classification (i.e. classifying scanned documents). 
LayoutLM models obtain SOTA on this dataset, so would be great to directly use it in notebooks.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2761","id":961568287,"node_id":"MDU6SXNzdWU5NjE1NjgyODc=","number":2761,"title":"Error loading C4 realnewslike dataset","user":{"login":"danshirron","id":32061512,"node_id":"MDQ6VXNlcjMyMDYxNTEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32061512?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danshirron","html_url":"https:\/\/github.com\/danshirron","followers_url":"https:\/\/api.github.com\/users\/danshirron\/followers","following_url":"https:\/\/api.github.com\/users\/danshirron\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danshirron\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danshirron\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danshirron\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danshirron\/orgs","repos_url":"https:\/\/api.github.com\/users\/danshirron\/repos","events_url":"https:\/\/api.github.com\/users\/danshirron\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danshirron\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-08-05T08:16:58Z","updated_at":"2021-08-08T19:44:34Z","closed_at":"2021-08-08T19:44:34Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nError loading C4 realnewslike dataset. 
Validation part mismatch\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n raw_datasets = load_dataset('c4', 'realnewslike', cache_dir=model_args.cache_dir)\r\n## Expected results\r\nsuccess on data loading\r\n## Actual results\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 15.3M\/15.3M [00:00<00:00, 28.1MB\/s]Traceback (most recent call last): \r\n File \"run_mlm_tf.py\", line 794, in \r\n main() \r\n File \"run_mlm_tf.py\", line 425, in main \r\n raw_datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name, cache_dir=model_args.cache_dir) File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 843, in load_dataset \r\n builder_instance.download_and_prepare( \r\n File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 608, in download_and_prepare \r\n self._download_and_prepare( \r\n File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 698, in _download_and_prepare verify_splits(self.info.splits, split_dict) File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 74, in verify_splits \r\n raise NonMatchingSplitsSizesError(str(bad_splits)) \r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='validation', num_bytes=38165657946, num_examples=13799838, dataset_name='c4'), 'recorded': SplitInfo(name='validation', num_bytes=37875873, num_examples=13863, dataset_name='c4')}] \r\n\r\n## Environment info\r\n- `datasets` version: 1.10.2\r\n- Platform: Linux-5.4.0-58-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 4.0.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761\/timeline","performed_via_github_app":null} 
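For the `NonMatchingSplitsSizesError` in issue 2761 above, a hedged sketch of the usual `datasets` 1.x-era workarounds. Neither is presented as the official fix for that report (there the mismatch went away after upgrading `datasets`); they are shown only to illustrate what the verification error is guarding against.

```python
from datasets import load_dataset

# Option 1: force a fresh download in case a truncated or partially cached
# archive produced the undersized validation split.
# Option 2: skip the recorded-vs-downloaded split size check entirely
# (use with care: the check exists to catch exactly this situation).
raw_datasets = load_dataset(
    "c4",
    "realnewslike",
    download_mode="force_redownload",
    ignore_verifications=True,
)
```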
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2760","id":961372667,"node_id":"MDU6SXNzdWU5NjEzNzI2Njc=","number":2760,"title":"Add Nuswide dataset","user":{"login":"shivangibithel","id":19774925,"node_id":"MDQ6VXNlcjE5Nzc0OTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19774925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shivangibithel","html_url":"https:\/\/github.com\/shivangibithel","followers_url":"https:\/\/api.github.com\/users\/shivangibithel\/followers","following_url":"https:\/\/api.github.com\/users\/shivangibithel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shivangibithel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shivangibithel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shivangibithel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shivangibithel\/orgs","repos_url":"https:\/\/api.github.com\/users\/shivangibithel\/repos","events_url":"https:\/\/api.github.com\/users\/shivangibithel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shivangibithel\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-05T03:00:41Z","updated_at":"2021-12-08T12:06:23Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *NUSWIDE*\r\n- **Description:** *[A Real-World Web Image Dataset from National University of Singapore](https:\/\/lms.comp.nus.edu.sg\/wp-content\/uploads\/2019\/research\/nuswide\/NUS-WIDE.html)*\r\n- **Paper:** *[here](https:\/\/lms.comp.nus.edu.sg\/wp-content\/uploads\/2019\/research\/nuswide\/nuswide-civr2009.pdf)*\r\n- **Data:** *[here](https:\/\/github.com\/wenting-zhao\/nuswide)*\r\n- **Motivation:** *This dataset is a benchmark in the Text Retrieval task.*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2759","id":960636572,"node_id":"MDU6SXNzdWU5NjA2MzY1NzI=","number":2759,"title":"the meteor metric seems not consist with the official version","user":{"login":"jianguda","id":9079360,"node_id":"MDQ6VXNlcjkwNzkzNjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9079360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jianguda","html_url":"https:\/\/github.com\/jianguda","followers_url":"https:\/\/api.github.com\/users\/jianguda\/followers","following_url":"https:\/\/api.github.com\/users\/jianguda\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jianguda\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jianguda\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jianguda\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jianguda\/orgs","repos_url":"https:\/\/api.github.com\/users\/jianguda\/repos","events_url":"https:\/\/api.github.com\/users\/jianguda\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jianguda\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2067393914,"node_id":"MDU6TGFiZWwyMDY3MzkzOTE0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20bug","name":"metric bug","color":"25b21e","default":false,"description":"A bug in a metric script"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-04T15:33:17Z","updated_at":"2022-01-19T14:17:49Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe computed meteor score seems strange because the value is very different from the scores computed by other tools. For example, I use the meteor score computed by [NLGeval](https:\/\/github.com\/Maluuba\/nlg-eval) as the reference (which reuses the official jar file for the computation)\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_metric\r\nfrom nlgeval import NLGEval, compute_individual_metrics\r\n\r\nmeteor = load_metric('meteor')\r\npredictions = [\"It is a guide to action which ensures that the military always obeys the commands of the party\"]\r\nreferences = [\"It is a guide to action that ensures that the military will forever heed Party commands\"]\r\nresults = meteor.compute(predictions=predictions, references=references)\r\n# print the actual result\r\nprint(round(results[\"meteor\"], 4))\r\nmetrics_dict = compute_individual_metrics(references, predictions[0])\r\n# print the expected result\r\nprint(round(metrics_dict[\"METEOR\"], 4))\r\n```\r\nBy the way, you need to install the `nlg-eval` library first. 
Please check the installation guide [here](https:\/\/github.com\/Maluuba\/nlg-eval#setup), thanks!\r\n\r\n## Expected results\r\n`0.4474`\r\n\r\n## Actual results\r\n`0.7398`\r\n\r\n## Environment info\r\n- `datasets` version: 1.10.2\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758","id":960206575,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAzMjQ5Nzky","number":2758,"title":"Raise ManualDownloadError when loading a dataset that requires previous manual download","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-04T10:19:55Z","updated_at":"2021-08-04T11:36:30Z","closed_at":"2021-08-04T11:36:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2758","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758.patch","merged_at":"2021-08-04T11:36:30Z"},"body":"This PR implements the raising of a `ManualDownloadError` when loading a dataset that requires previous manual download, and this is missing.\r\n\r\nThe `ManualDownloadError` is raised whether the dataset is loaded in normal or streaming mode.\r\n\r\nClose #2749.\r\n\r\ncc: @severo 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2757","id":959984081,"node_id":"MDU6SXNzdWU5NTk5ODQwODE=","number":2757,"title":"Unexpected type after `concatenate_datasets`","user":{"login":"JulesBelveze","id":32683010,"node_id":"MDQ6VXNlcjMyNjgzMDEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32683010?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JulesBelveze","html_url":"https:\/\/github.com\/JulesBelveze","followers_url":"https:\/\/api.github.com\/users\/JulesBelveze\/followers","following_url":"https:\/\/api.github.com\/users\/JulesBelveze\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JulesBelveze\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JulesBelveze\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JulesBelveze\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JulesBelveze\/orgs","repos_url":"https:\/\/api.github.com\/users\/JulesBelveze\/repos","events_url":"https:\/\/api.github.com\/users\/JulesBelveze\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JulesBelveze\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-08-04T07:10:39Z","updated_at":"2021-08-04T16:01:24Z","closed_at":"2021-08-04T16:01:23Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to concatenate two `Dataset` using `concatenate_datasets` but it turns out that after concatenation the features are casted from `torch.Tensor` to `list`. \r\nIt then leads to a weird tensors when trying to convert it to a `DataLoader`. However, if I use each `Dataset` separately everything behave as expected.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n>>> featurized_teacher\r\nDataset({\r\n features: ['t_labels', 't_input_ids', 't_token_type_ids', 't_attention_mask'],\r\n num_rows: 502\r\n})\r\n>>> for f in featurized_teacher.features:\r\n print(featurized_teacher[f].shape)\r\ntorch.Size([502])\r\ntorch.Size([502, 300])\r\ntorch.Size([502, 300])\r\ntorch.Size([502, 300])\r\n\r\n>>> featurized_student\r\nDataset({\r\n features: ['s_features', 's_labels'],\r\n num_rows: 502\r\n})\r\n>>> for f in featurized_student.features:\r\n print(featurized_student[f].shape)\r\ntorch.Size([502, 64])\r\ntorch.Size([502])\r\n```\r\nThe shapes seem alright to me. 
Then the results after concatenation are as follow:\r\n```python\r\n>>> concat_dataset = datasets.concatenate_datasets([featurized_student, featurized_teacher], axis=1)\r\n>>> type(concat_dataset[\"t_labels\"])\r\n\r\n```\r\nOne would expect to obtain the same type as the one before concatenation.\r\n\r\nAm I doing something wrong here? Any idea on how to fix this unexpected behavior?\r\n\r\n## Environment info\r\n- `datasets` version: 1.9.0\r\n- Platform: macOS-10.14.6-x86_64-i386-64bit\r\n- Python version: 3.9.5\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756","id":959255646,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMzk4Mjk1","number":2756,"title":"Fix metadata JSON for ubuntu_dialogs_corpus dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T15:48:59Z","updated_at":"2021-08-04T09:43:25Z","closed_at":"2021-08-04T09:43:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2756","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756.patch","merged_at":"2021-08-04T09:43:25Z"},"body":"Related to 
#2743.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755","id":959115888,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjgwMjI4","number":2755,"title":"Fix metadata JSON for turkish_movie_sentiment dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T13:25:44Z","updated_at":"2021-08-04T09:06:54Z","closed_at":"2021-08-04T09:06:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2755","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755.patch","merged_at":"2021-08-04T09:06:53Z"},"body":"Related to #2743.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754","id":959105577,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjcxMjM4","number":2754,"title":"Generate metadata JSON for telugu_books dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T13:14:52Z","updated_at":"2021-08-04T08:49:02Z","closed_at":"2021-08-04T08:49:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2754","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754.patch","merged_at":"2021-08-04T08:49:01Z"},"body":"Related to #2743.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753","id":959036995,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjEyMjMz","number":2753,"title":"Generate metadata JSON for reclor 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T11:52:29Z","updated_at":"2021-08-04T08:07:15Z","closed_at":"2021-08-04T08:07:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2753","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753.patch","merged_at":"2021-08-04T08:07:15Z"},"body":"Related to #2743.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752","id":959023608,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjAxMjAy","number":2752,"title":"Generate metadata JSON for lm1b 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T11:34:56Z","updated_at":"2021-08-04T06:40:40Z","closed_at":"2021-08-04T06:40:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2752","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752.patch","merged_at":"2021-08-04T06:40:39Z"},"body":"Related to #2743.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751","id":959021262,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMTk5MjA5","number":2751,"title":"Update metadata for wikihow 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T11:31:57Z","updated_at":"2021-08-03T15:52:09Z","closed_at":"2021-08-03T15:52:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2751","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751.patch","merged_at":"2021-08-03T15:52:09Z"},"body":"Update metadata for wikihow dataset:\r\n- Remove leading new line character in description and citation\r\n- Update metadata JSON\r\n- Remove no longer necessary `urls_checksums\/checksums.txt` file\r\n\r\nRelated to #2748.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2750","id":958984730,"node_id":"MDU6SXNzdWU5NTg5ODQ3MzA=","number":2750,"title":"Second concatenation of datasets produces 
errors","user":{"login":"Aktsvigun","id":36672861,"node_id":"MDQ6VXNlcjM2NjcyODYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36672861?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Aktsvigun","html_url":"https:\/\/github.com\/Aktsvigun","followers_url":"https:\/\/api.github.com\/users\/Aktsvigun\/followers","following_url":"https:\/\/api.github.com\/users\/Aktsvigun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Aktsvigun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Aktsvigun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Aktsvigun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Aktsvigun\/orgs","repos_url":"https:\/\/api.github.com\/users\/Aktsvigun\/repos","events_url":"https:\/\/api.github.com\/users\/Aktsvigun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Aktsvigun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-08-03T10:47:04Z","updated_at":"2022-01-19T14:23:43Z","closed_at":"2022-01-19T14:19:05Z","author_association":"NONE","active_
lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI am need to concatenate my dataset with others several times, and after I concatenate it for the second time, the features of features (e.g. tags names) are collapsed. This hinders, for instance, the usage of tokenize function with `data.map`.\r\n\r\n```\r\nfrom datasets import load_dataset, concatenate_datasets\r\n\r\ndata = load_dataset('trec')['train']\r\nconcatenated = concatenate_datasets([data, data])\r\nconcatenated_2 = concatenate_datasets([concatenated, concatenated])\r\nprint('True features of features:', concatenated.features)\r\nprint('\\nProduced features of features:', concatenated_2.features)\r\n```\r\noutputs \r\n\r\n```\r\nTrue features of features: {'label-coarse': ClassLabel(num_classes=6, names=['DESC', 'ENTY', 'ABBR', 'HUM', 'NUM', 'LOC'], names_file=None, id=None), 'label-fine': ClassLabel(num_classes=47, names=['manner', 'cremat', 'animal', 'exp', 'ind', 'gr', 'title', 'def', 'date', 'reason', 'event', 'state', 'desc', 'count', 'other', 'letter', 'religion', 'food', 'country', 'color', 'termeq', 'city', 'body', 'dismed', 'mount', 'money', 'product', 'period', 'substance', 'sport', 'plant', 'techmeth', 'volsize', 'instru', 'abb', 'speed', 'word', 'lang', 'perc', 'code', 'dist', 'temp', 'symbol', 'ord', 'veh', 'weight', 'currency'], names_file=None, id=None), 'text': Value(dtype='string', id=None)}\r\n\r\nProduced features of features: {'label-coarse': Value(dtype='int64', id=None), 'label-fine': Value(dtype='int64', id=None), 'text': Value(dtype='string', id=None)}\r\n```\r\n\r\nI am using `datasets` v.1.11.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2749","id":958968748,"node_id":"MDU6SXNzdWU5NTg5Njg3NDg=","number":2749,"title":"Raise a proper exception when trying to stream a dataset that requires to manually download 
files","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-08-03T10:26:27Z","updated_at":"2021-08-09T08:53:35Z","closed_at":"2021-08-04T11:36:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"
pull_request":null,"body":"## Describe the bug\r\n\r\nAt least for 'reclor', 'telugu_books', 'turkish_movie_sentiment', 'ubuntu_dialogs_corpus', 'wikihow', trying to `load_dataset` in streaming mode raises a `TypeError` without any detail about why it fails.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"reclor\", streaming=True)\r\n```\r\n\r\n## Expected results\r\n\r\nIdeally: raise a specific exception, something like `ManualDownloadError`.\r\n\r\nOr at least give the reason in the message, as when we load in normal mode:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"reclor\")\r\n```\r\n\r\n```\r\nAssertionError: The dataset reclor with config default requires manual data.\r\n Please follow the manual download instructions: to use ReClor you need to download it manually. Please go to its homepage (http:\/\/whyu.me\/reclor\/) fill the google\r\n form and you will receive a download link and a password to extract it.Please extract all files in one folder and use the path folder in datasets.load_dataset('reclor', data_dir='path\/to\/folder\/folder_name')\r\n .\r\n Manual data can be loaded with `datasets.load_dataset(reclor, data_dir='')\r\n```\r\n\r\n## Actual results\r\n\r\n```\r\nTypeError: expected str, bytes or os.PathLike object, not NoneType\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: macOS-11.5-x86_64-i386-64bit\r\n- Python version: 3.8.11\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748","id":958889041,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMDg4NTk4","number":2748,"title":"Generate metadata JSON for wikihow 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T08:55:40Z","updated_at":"2021-08-03T10:17:51Z","closed_at":"2021-08-03T10:17:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2748","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748.patch","merged_at":"2021-08-03T10:17:51Z"},"body":"Related to #2743.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747","id":958867627,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMDcwOTgy","number":2747,"title":"add multi-proc in 
`to_json`","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":17,"created_at":"2021-08-03T08:30:13Z","updated_at":"2021-10-19T18:24:21Z","closed_at":"2021-09-13T13:56:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2747","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747.patch","merged_at":"2021-09-13T13:56:37Z"},"body":"Closes #2663. I've tried adding multiprocessing in `to_json`. Here's some benchmarking I did to compare the timings of current version (say v1) and multi-proc version (say v2). I did this with `cpu_count` 4 (2015 Macbook Air)\r\n\r\n1. Dataset name: `ascent_kb` - 8.9M samples (all samples were used, reporting this for a single run)\r\nv1- ~225 seconds for converting whole dataset to json\r\nv2- ~200 seconds for converting whole dataset to json\r\n\r\n2. Dataset name: `lama` - 1.3M samples (all samples were used, reporting this for 2 runs)\r\nv1- ~26 seconds for converting whole dataset to json\r\nv2- ~23.6 seconds for converting whole dataset to json\r\n\r\nI think it's safe to say that v2 is 10% faster as compared to v1. Timings may improve further with better configuration.\r\n\r\nThe only bottleneck I feel is writing to file from the output list. If we can improve that aspect then timings may improve further. \r\n\r\nLet me know if any changes\/improvements can be done in this @stas00, @lhoestq, @albertvillanova. 
@lhoestq even suggested to extend this work with other export methods as well like `csv` or `parquet`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2746","id":958551619,"node_id":"MDU6SXNzdWU5NTg1NTE2MTk=","number":2746,"title":"Cannot load `few-nerd` dataset","user":{"login":"Mehrad0711","id":28717374,"node_id":"MDQ6VXNlcjI4NzE3Mzc0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28717374?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehrad0711","html_url":"https:\/\/github.com\/Mehrad0711","followers_url":"https:\/\/api.github.com\/users\/Mehrad0711\/followers","following_url":"https:\/\/api.github.com\/users\/Mehrad0711\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehrad0711\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehrad0711\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehrad0711\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehrad0711\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehrad0711\/repos","events_url":"https:\/\/api.github.com\/users\/Mehrad0711\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehrad0711\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-08-02T22:18:57Z","updated_at":"2021-11-16T08:51:34Z","closed_at":"2021-08-03T19:45:43Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nCannot load `few-nerd` dataset.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset('few-nerd', 'supervised')\r\n```\r\n\r\n## Actual results\r\n\r\nExecuting above code will give the following error:\r\n\r\n```\r\nUsing the latest cached version of the module from \/Users\/Mehrad\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/few-nerd\/62464ace912a40a0f33a11a8310f9041c9dc3590ff2b3c77c14d83ca53cfec53 (last modified on Wed Jun 2 11:34:25 2021) since it couldn't be found locally at \/Users\/Mehrad\/Documents\/GitHub\/genienlp\/few-nerd\/few-nerd.py, or remotely (FileNotFoundError).\r\nDownloading and preparing dataset few_nerd\/supervised (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to 
\/Users\/Mehrad\/.cache\/huggingface\/datasets\/few_nerd\/supervised\/0.0.0\/62464ace912a40a0f33a11a8310f9041c9dc3590ff2b3c77c14d83ca53cfec53...\r\nTraceback (most recent call last):\r\n File \"\/Users\/Mehrad\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 693, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/Users\/Mehrad\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 1107, in _prepare_split\r\n disable=bool(logging.get_verbosity() == logging.NOTSET),\r\n File \"\/Users\/Mehrad\/opt\/anaconda3\/lib\/python3.7\/site-packages\/tqdm\/std.py\", line 1133, in __iter__\r\n for obj in iterable:\r\n File \"\/Users\/Mehrad\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/few-nerd\/62464ace912a40a0f33a11a8310f9041c9dc3590ff2b3c77c14d83ca53cfec53\/few-nerd.py\", line 196, in _generate_examples\r\n with open(filepath, encoding=\"utf-8\") as f:\r\nFileNotFoundError: [Errno 2] No such file or directory: '\/Users\/Mehrad\/.cache\/huggingface\/datasets\/downloads\/supervised\/train.json'\r\n```\r\nThe bug is probably in identifying and downloading the dataset. If I download the json splits directly from [link](https:\/\/github.com\/nbroad1881\/few-nerd\/tree\/main\/uncompressed) and put them under the downloads directory, they will be processed into arrow format correctly. \r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Python version: 3.8\r\n- PyArrow version: 1.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745","id":958269579,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxNTc0Mjcz","number":2745,"title":"added semeval18_emotion_classification 
dataset","user":{"login":"maxpel","id":31095360,"node_id":"MDQ6VXNlcjMxMDk1MzYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31095360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxpel","html_url":"https:\/\/github.com\/maxpel","followers_url":"https:\/\/api.github.com\/users\/maxpel\/followers","following_url":"https:\/\/api.github.com\/users\/maxpel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxpel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxpel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxpel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxpel\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxpel\/repos","events_url":"https:\/\/api.github.com\/users\/maxpel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxpel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-08-02T15:39:55Z","updated_at":"2021-10-29T09:22:05Z","closed_at":"2021-09-21T09:48:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2745","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745.patch","merged_at":"2021-09-21T09:48:35Z"},"body":"I added the data set of SemEval 2018 Task 1 (Subtask 5) for emotion detection in three languages.\r\n\r\n```\r\ndatasets-cli test datasets\/semeval18_emotion_classification\/ --save_infos --all_configs\r\n\r\nRUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_real_dataset_semeval18_emotion_classification\r\n```\r\nBoth commands ran successfully.\r\n\r\nI couldn't create the dummy data (the files are tsvs but have .txt ending, maybe that's the problem?) and therefore the test on the dummy data fails, maybe someone can help here.\r\n\r\nI also formatted the code:\r\n```\r\nblack --line-length 119 --target-version py36 datasets\/semeval18_emotion_classification\/\r\nisort datasets\/semeval18_emotion_classification\/\r\nflake8 datasets\/semeval18_emotion_classification\/\r\n```\r\nThat's the publication for reference:\r\n\r\nMohammad, S., Bravo-Marquez, F., Salameh, M., & Kiritchenko, S. (2018). SemEval-2018 task 1: Affect in tweets. Proceedings of the 12th International Workshop on Semantic Evaluation, 1\u201317. 
https:\/\/doi.org\/10.18653\/v1\/S18-1001","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744","id":958146637,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxNDY4NDcz","number":2744,"title":"Fix key by recreating metadata JSON for journalists_questions dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T13:27:53Z","updated_at":"2021-08-03T09:25:34Z","closed_at":"2021-08-03T09:25:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2744","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744.patch","merged_at":"2021-08-03T09:25:33Z"},"body":"Close #2743.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744\/timeline","performed_via_github_app":null} 
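PR #2745 above notes that the SemEval-2018 Task 1 files are tab-separated values stored with a `.txt` extension, which is also what made the dummy-data generation awkward. A minimal sketch of reading one such split under that assumption (the header-row layout, the function name, and the placeholder file name are illustrative guesses, not taken from the dataset script):

```python
import csv


def read_semeval18_split(path):
    """Yield one dict per tweet from a SemEval-2018 Task 1 file.

    Assumption: the file is tab-separated with a header row, even though
    its extension is .txt (as described in the PR).
    """
    with open(path, encoding="utf-8") as f:
        reader = csv.DictReader(f, delimiter="\t")
        for row in reader:
            yield row


# Hypothetical usage; the file name is a placeholder, not a real path:
# for example in read_semeval18_split("train.txt"):
#     print(example)
```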
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2743","id":958119251,"node_id":"MDU6SXNzdWU5NTgxMTkyNTE=","number":2743,"title":"Dataset JSON is incorrect","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-08-02T13:01:26Z","updated_at":"2021-08-03T10:06:57Z","closed_at":"2021-08-03T09:25:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nThe JSON file generated for https:\/\/github.com\/huggingface\/datasets\/blob\/573f3d35081cee239d1b962878206e9abe6cde91\/datasets\/journalists_questions\/journalists_questions.py is https:\/\/github.com\/huggingface\/datasets\/blob\/573f3d35081cee239d1b962878206e9abe6cde91\/datasets\/journalists_questions\/dataset_infos.json.\r\n\r\nThe only config should be `plain_text`, but the first key in the JSON is `journalists_questions` (the dataset id) instead.\r\n\r\n```json\r\n{\r\n \"journalists_questions\": {\r\n \"description\": \"The journalists_questions corpus (version 1.0) is a collection of 10K human-written Arabic\\ntweets manually labeled for question identification over Arabic tweets posted by journalists.\\n\",\r\n ...\r\n```\r\n\r\n## Steps to reproduce the bug\r\n\r\nLook at the files.\r\n\r\n## Expected results\r\n\r\nThe first key should be `plain_text`:\r\n\r\n```json\r\n{\r\n \"plain_text\": {\r\n \"description\": \"The journalists_questions corpus (version 1.0) is a collection of 10K human-written Arabic\\ntweets manually labeled for question identification over Arabic tweets posted by 
journalists.\\n\",\r\n ...\r\n```\r\n\r\n## Actual results\r\n\r\n```json\r\n{\r\n \"journalists_questions\": {\r\n \"description\": \"The journalists_questions corpus (version 1.0) is a collection of 10K human-written Arabic\\ntweets manually labeled for question identification over Arabic tweets posted by journalists.\\n\",\r\n ...\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2742","id":958114064,"node_id":"MDU6SXNzdWU5NTgxMTQwNjQ=","number":2742,"title":"Improve detection of streamable file types","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on 
huggingface.co"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-08-02T12:55:09Z","updated_at":"2021-11-12T17:18:10Z","closed_at":"2021-11-12T17:18:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\n```python\r\nfrom datasets import load_dataset_builder\r\nfrom datasets.utils.streaming_download_manager import StreamingDownloadManager\r\nbuilder = load_dataset_builder(\"journalists_questions\", name=\"plain_text\")\r\nbuilder._split_generators(StreamingDownloadManager(base_path=builder.base_path))\r\n```\r\n\r\nraises\r\n\r\n```\r\nNotImplementedError: Extraction protocol for file at https:\/\/drive.google.com\/uc?export=download&id=1CBrh-9OrSpKmPQBxTK_ji6mq6WTN_U9U is not implemented yet\r\n```\r\n\r\nBut the file at https:\/\/drive.google.com\/uc?export=download&id=1CBrh-9OrSpKmPQBxTK_ji6mq6WTN_U9U is a text file and it can be streamed:\r\n\r\n```bash\r\ncurl --header \"Range: bytes=0-100\" -L https:\/\/drive.google.com\/uc\\?export\\=download\\&id\\=1CBrh-9OrSpKmPQBxTK_ji6mq6WTN_U9U\r\n506938088174940160 yes 1\r\n302221719412830209 yes 1\r\n289761704907268096 yes 1\r\n513820885032378369 yes %\r\n```\r\n\r\nYet, it's wrongly categorized as a file type that cannot be streamed because the test is currently based on 1. the presence of a file extension at the end of the URL (here: no extension), and 2. 
the inclusion of this extension in a list of supported formats.\r\n\r\n**Describe the solution you'd like**\r\n\r\nIn the case of an URL (instead of a local path), ask for the MIME type, and decide on that value? Note that it would not work in that case, because the value of `content_type` is `text\/html; charset=UTF-8`.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nAdd a variable in the dataset script to set the data format by hand.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2741","id":957979559,"node_id":"MDU6SXNzdWU5NTc5Nzk1NTk=","number":2741,"title":"Add Hypersim dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T10:06:50Z","updated_at":"2021-12-08T12:06:51Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Hypersim\r\n- **Description:** photorealistic synthetic dataset for holistic indoor scene understanding\r\n- **Paper:** *link to the dataset paper if available*\r\n- **Data:** https:\/\/github.com\/apple\/ml-hypersim\r\n\r\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740","id":957911035,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxMjY0NTI3","number":2740,"title":"Update release instructions","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T08:46:00Z","updated_at":"2021-08-02T14:39:56Z","closed_at":"2021-08-02T14:39:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2740","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740.patch","merged_at":"2021-08-02T14:39:56Z"},"body":"Update release instructions.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740\/timeline","performed_via_github_app":null} 
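Issue #2742, a little above, is about how `datasets` decides whether a remote file can be streamed: today the check keys off the extension at the end of the URL, and the reporter floats the response's MIME type as an alternative. A hedged sketch of both checks; the extension list and helper names are illustrative assumptions, not the actual library code:

```python
from pathlib import PurePosixPath
from urllib.parse import urlparse

import requests

# Illustrative subset of extensions treated as streamable; the real list in
# `datasets` is longer and lives in the library itself.
KNOWN_STREAMABLE_EXTENSIONS = {".csv", ".json", ".jsonl", ".txt", ".tsv", ".parquet"}


def looks_streamable_by_extension(url: str) -> bool:
    """Current heuristic from the issue: trust the URL's file extension."""
    suffix = PurePosixPath(urlparse(url).path).suffix.lower()
    return suffix in KNOWN_STREAMABLE_EXTENSIONS


def content_type_of(url: str) -> str:
    """Proposed alternative from the issue: ask the server for the MIME type."""
    response = requests.head(url, allow_redirects=True, timeout=10)
    return response.headers.get("Content-Type", "")
```

For the Google Drive URL quoted in the issue, the first check fails (the URL has no extension) and the second returns `text/html; charset=UTF-8`, which is exactly why the report concludes that neither heuristic is sufficient on its own.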
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739","id":957751260,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxMTI0ODQ3","number":2739,"title":"Pass tokenize to sacrebleu only if explicitly passed by user","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T05:09:05Z","updated_at":"2021-08-03T04:23:37Z","closed_at":"2021-08-03T04:23:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2739","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739.patch","merged_at":"2021-08-03T04:23:37Z"},"body":"Next `sacrebleu` release (v2.0.0) will remove `sacrebleu.DEFAULT_TOKENIZER`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR passes `tokenize` to `sacrebleu` only if explicitly passed by the user, otherwise it will not pass it (and `sacrebleu` will use its default, no matter where it is and how it is called).\r\n\r\nClose: #2737.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738","id":957517746,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwOTI5NzA4","number":2738,"title":"Sunbird AI Ugandan low resource language dataset","user":{"login":"ak3ra","id":12105163,"node_id":"MDQ6VXNlcjEyMTA1MTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12105163?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ak3ra","html_url":"https:\/\/github.com\/ak3ra","followers_url":"https:\/\/api.github.com\/users\/ak3ra\/followers","following_url":"https:\/\/api.github.com\/users\/ak3ra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ak3ra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ak3ra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ak3ra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ak3ra\/orgs","repos_url":"https:\/\/api.github.com\/users\/ak3ra\/repos","events_url":"https:\/\/api.github.com\/users\/ak3ra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ak3ra\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-08-01T15:18:00Z","updated_at":"2021-12-01T09:18:09Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2738","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738.patch","merged_at":null},"body":"Multi-way parallel text corpus of 5 key Ugandan languages for the task of machine translation. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2737","id":957124881,"node_id":"MDU6SXNzdWU5NTcxMjQ4ODE=","number":2737,"title":"SacreBLEU update","user":{"login":"devrimcavusoglu","id":46989091,"node_id":"MDQ6VXNlcjQ2OTg5MDkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46989091?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/devrimcavusoglu","html_url":"https:\/\/github.com\/devrimcavusoglu","followers_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/followers","following_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/repos","events_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-07-30T23:53:08Z","updated_at":"2021-09-22T10:47:41Z","closed_at":"2021-08-03T04:23:37Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"With the latest release of [sacrebleu](https:\/\/github.com\/mjpost\/sacrebleu), `datasets.metrics.sacrebleu` is broken, and getting error.\r\n\r\n AttributeError: module 'sacrebleu' has no attribute 'DEFAULT_TOKENIZER'\r\n\r\nthis happens since in new version of sacrebleu there is no `DEFAULT_TOKENIZER`, but sacrebleu.py tries to import it anyways. 
This can be fixed currently with fixing `sacrebleu==1.5.0`\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nsacrebleu= datasets.load_metric('sacrebleu')\r\npredictions = [\"It is a guide to action which ensures that the military always obeys the commands of the party\"]\r\nreferences = [\"It is a guide to action that ensures that the military will forever heed Party commands\"]\r\nresults = sacrebleu.compute(predictions=predictions, references=references)\r\nprint(results)\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: Python 3.8.0\r\n- PyArrow version: 5.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2736","id":956895199,"node_id":"MDU6SXNzdWU5NTY4OTUxOTk=","number":2736,"title":"Add Microsoft Building Footprints dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-30T16:17:08Z","updated_at":"2021-12-08T12:09:03Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Microsoft Building Footprints\r\n- **Description:** With the goal to increase the coverage of 
building footprint data available as open data for OpenStreetMap and humanitarian efforts, we have released millions of building footprints as open data available to download free of charge.\r\n- **Paper:** *link to the dataset paper if available*\r\n- **Data:** https:\/\/www.microsoft.com\/en-us\/maps\/building-footprints\r\n- **Motivation:** this can be a useful dataset for researchers working on climate change adaptation, urban studies, geography, etc.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nReported by: @sashavor","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2735","id":956889365,"node_id":"MDU6SXNzdWU5NTY4ODkzNjU=","number":2735,"title":"Add Open Buildings dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-30T16:08:39Z","updated_at":"2021-07-31T05:01:25Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Open Buildings\r\n- **Description:** A dataset of building footprints to support social good applications.\r\n\r\n Building footprints are useful for a range of important applications, from population estimation, urban planning and humanitarian response, to environmental and climate science. 
This large-scale open dataset contains the outlines of buildings derived from high-resolution satellite imagery in order to support these types of uses. The project being based in Ghana, the current focus is on the continent of Africa.\r\n\r\n See: \"Mapping Africa's Buildings with Satellite Imagery\" https:\/\/ai.googleblog.com\/2021\/07\/mapping-africas-buildings-with.html\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2107.12283\r\n- **Data:** https:\/\/sites.research.google\/open-buildings\/\r\n- **Motivation:** *what are some good reasons to have this dataset*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nReported by: @osanseviero ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734","id":956844874,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwMzc4NjI4","number":2734,"title":"Update BibTeX entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-30T15:22:51Z","updated_at":"2021-07-30T15:47:58Z","closed_at":"2021-07-30T15:47:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2734","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734.patch","merged_at":"2021-07-30T15:47:58Z"},"body":"Update BibTeX 
entry.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733","id":956725476,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwMjc1NDMy","number":2733,"title":"Add missing parquet known extension","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-30T13:01:20Z","updated_at":"2021-07-30T13:24:31Z","closed_at":"2021-07-30T13:24:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2733","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733.patch","merged_at":"2021-07-30T13:24:30Z"},"body":"This code was failing because the parquet extension wasn't recognized:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nbase_url = \"https:\/\/storage.googleapis.com\/huggingface-nlp\/cache\/datasets\/wikipedia\/20200501.en\/1.0.0\/\"\r\ndata_files = {\"train\": base_url + \"wikipedia-train.parquet\"}\r\nwiki = load_dataset(\"parquet\", data_files=data_files, split=\"train\", streaming=True)\r\n```\r\n\r\nIt raises\r\n```python\r\nNotImplementedError: Extraction protocol for file at https:\/\/storage.googleapis.com\/huggingface-nlp\/cache\/datasets\/wikipedia\/20200501.en\/1.0.0\/wikipedia-train.parquet is not implemented yet\r\n```\r\n\r\nI added `parquet` to the list of known extensions\r\n\r\nEDIT: added pickle, conllu, xml extensions as 
well","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732","id":956676360,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwMjMzMzQy","number":2732,"title":"Updated TTC4900 Dataset","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-30T11:52:14Z","updated_at":"2021-07-30T16:00:51Z","closed_at":"2021-07-30T15:58:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2732","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732.patch","merged_at":"2021-07-30T15:58:14Z"},"body":"- The source address of the TTC4900 dataset of [@savasy](https:\/\/github.com\/savasy) has been updated for direct download.\r\n- Updated readme.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731","id":956087452,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk5NzQwMjg5","number":2731,"title":"Adding to_tf_dataset method","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-07-29T18:10:25Z","updated_at":"2021-09-16T13:50:54Z","closed_at":"2021-09-16T13:50:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2731","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731.patch","merged_at":"2021-09-16T13:50:53Z"},"body":"Oh my **god** do not merge this yet, it's just a draft.\r\n\r\nI've added a method (via a mixin) to the `arrow_dataset.Dataset` class that automatically converts our Dataset classes to TF Dataset classes ready for training. It hopefully has most of the features we want, including streaming from disk (no need to load the whole dataset in memory!), correct shuffling, variable-length batches to reduce compute, and correct support for unusual padding. It achieves that by calling the tokenizer `pad` method in the middle of a TF compute graph via a very hacky call to `tf.py_function`, which is heretical but seems to work.\r\n\r\nA number of issues need to be resolved before it's ready to merge, though:\r\n\r\n1) Is a MixIn the right way to do this? Do other classes besides `arrow_dataset.Dataset` need this method too?\r\n2) Needs an argument to support constant-length batches for TPU training - this is easy to add and I'll do it soon.\r\n3) Needs the user to supply the list of columns to drop from the arrow `Dataset`. 
Is there some automatic way to get the columns we want, or see which columns were added by the tokenizer?\r\n4) Assumes the label column is always present and always called \"label\" - this is probably not great, but I'm not sure what the 'correct' thing to do here is.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2730","id":955987834,"node_id":"MDU6SXNzdWU5NTU5ODc4MzQ=","number":2730,"title":"Update CommonVoice with new release","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-29T15:59:59Z","updated_at":"2021-08-07T16:19:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** CommonVoice mid-2021 release\r\n- **Description:** more data in CommonVoice: Languages that have increased the most by percentage are Thai (almost 20x growth, from 12 hours to 250 hours), Luganda (almost 9x growth, from 8 to 80), Esperanto (7x growth, from 100 to 840), and Tamil (almost 8x, from 24 to 220).\r\n- **Paper:** https:\/\/discourse.mozilla.org\/t\/common-voice-2021-mid-year-dataset-release\/83812\r\n- **Data:** https:\/\/commonvoice.mozilla.org\/en\/datasets\r\n- **Motivation:** More data and more varied. 
I think we just need to add configs in the existing dataset script.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729","id":955920489,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk5NTk5MjA4","number":2729,"title":"Fix IndexError while loading Arabic Billion Words dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-29T14:47:02Z","updated_at":"2021-07-30T13:03:55Z","closed_at":"2021-07-30T13:03:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2729","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729.patch","merged_at":"2021-07-30T13:03:55Z"},"body":"Catch `IndexError` and ignore that record.\r\n\r\nClose #2727.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729\/timeline","performed_via_github_app":null} 
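On the CommonVoice request in #2730 above, "adding configs in the existing dataset script" usually means declaring one `BuilderConfig` per language in the builder's `BUILDER_CONFIGS`. A minimal sketch under that assumption (hypothetical class name, language list and version; not the actual common_voice script):

```python
# Illustrative sketch only: how new language configs are typically declared in a
# dataset script. Names, languages and version below are assumptions.
import datasets

_LANGUAGES = ["th", "lg", "eo", "ta"]  # assumed subset of the mid-2021 release

class CommonVoiceSketch(datasets.GeneratorBasedBuilder):
    """Minimal stand-in for the real common_voice builder."""

    BUILDER_CONFIGS = [
        datasets.BuilderConfig(
            name=lang,
            version=datasets.Version("7.0.0"),
            description=f"Common Voice mid-2021 release, language: {lang}",
        )
        for lang in _LANGUAGES
    ]

    def _info(self):
        return datasets.DatasetInfo(
            features=datasets.Features(
                {"path": datasets.Value("string"), "sentence": datasets.Value("string")}
            )
        )

    def _split_generators(self, dl_manager):
        # The real script would download the per-language archives here.
        return []

    def _generate_examples(self):
        # The real script would yield (key, example) pairs here.
        yield from ()
```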
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2728","id":955892970,"node_id":"MDU6SXNzdWU5NTU4OTI5NzA=","number":2728,"title":"Concurrent use of same dataset (already downloaded)","user":{"login":"PierreColombo","id":22492839,"node_id":"MDQ6VXNlcjIyNDkyODM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22492839?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PierreColombo","html_url":"https:\/\/github.com\/PierreColombo","followers_url":"https:\/\/api.github.com\/users\/PierreColombo\/followers","following_url":"https:\/\/api.github.com\/users\/PierreColombo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PierreColombo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PierreColombo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PierreColombo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PierreColombo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PierreColombo\/repos","events_url":"https:\/\/api.github.com\/users\/PierreColombo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PierreColombo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-07-29T14:18:38Z","updated_at":"2021-08-02T07:25:57Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen launching several jobs at the same time loading the same dataset trigger some errors see (last comments).\r\n\r\n## Steps to reproduce the bug\r\nexport HF_DATASETS_CACHE=\/gpfswork\/rech\/toto\/datasets\r\nfor MODEL in \"bert-base-uncased\" \"roberta-base\" \"distilbert-base-cased\"; do # \"bert-base-uncased\" \"bert-large-cased\" \"roberta-large\" \"albert-base-v1\" \"albert-large-v1\"; do\r\n for TASK_NAME in \"mrpc\" \"rte\" 'imdb' \"paws\" \"mnli\"; do\r\n export OUTPUT_DIR=${MODEL}_${TASK_NAME}\r\n sbatch --job-name=${OUTPUT_DIR} \\\r\n --gres=gpu:1 \\\r\n --no-requeue \\\r\n --cpus-per-task=10 \\\r\n --hint=nomultithread \\\r\n --time=1:00:00 \\\r\n --output=jobinfo\/${OUTPUT_DIR}_%j.out \\\r\n --error=jobinfo\/${OUTPUT_DIR}_%j.err \\\r\n --qos=qos_gpu-t4 \\\r\n --wrap=\"module purge; module load pytorch-gpu\/py3\/1.7.0 ; export HF_DATASETS_OFFLINE=1; export HF_DATASETS_CACHE=\/gpfswork\/rech\/toto\/datasets; python compute_measures.py --seed=$SEED --saving_path=results --batch_size=$BATCH_SIZE --task_name=$TASK_NAME --model_name=\/gpfswork\/rech\/toto\/transformers_models\/$MODEL\"\r\n\r\n done\r\ndone\r\n\r\n\r\n\r\n```python\r\n# Sample code to reproduce the bug\r\n dataset_train = load_dataset('imdb', split='train', download_mode=\"reuse_cache_if_exists\")\r\n dataset_train = dataset_train.map(lambda e: 
tokenizer(e['text'], truncation=True, padding='max_length'),\r\n batched=True).select(list(range(args.filter)))\r\n\r\n dataset_val = load_dataset('imdb', split='train', download_mode=\"reuse_cache_if_exists\")\r\n dataset_val = dataset_val.map(lambda e: tokenizer(e['text'], truncation=True, padding='max_length'),\r\n batched=True).select(list(range(args.filter, args.filter + 5000)))\r\n\r\n dataset_test = load_dataset('imdb', split='test', download_mode=\"reuse_cache_if_exists\")\r\n dataset_test = dataset_test.map(lambda e: tokenizer(e['text'], truncation=True, padding='max_length'),\r\n batched=True)\r\n```\r\n\r\n## Expected results\r\nI believe I am doing something wrong with the objects. \r\n\r\n## Actual results\r\nTraceback (most recent call last):\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 983, in _prepare_split\r\n check_duplicates=True,\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 192, in __init__\r\n self.stream = pa.OSFile(self._path, \"wb\")\r\n File \"pyarrow\/io.pxi\", line 829, in pyarrow.lib.OSFile.__cinit__\r\n File \"pyarrow\/io.pxi\", line 844, in pyarrow.lib.OSFile._open_writable\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 97, in pyarrow.lib.check_status\r\nFileNotFoundError: [Errno 2] Failed to open local file '\/gpfswork\/rech\/tts\/unm25jp\/datasets\/paws\/labeled_final\/1.1.0\/09d8fae989bb569009a8f5b879ccf2924d3e5cd55bfe2e89e6dab1c0b50ecd34.incomplete\/paws-test.arrow'. Detail: [errno 2] No such file or directory\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"compute_measures.py\", line 181, in \r\n train_loader, val_loader, test_loader = get_dataloader(args)\r\n File \"\/gpfsdswork\/projects\/rech\/toto\/intRAOcular\/dataset_utils.py\", line 69, in get_dataloader\r\n dataset_train = load_dataset('paws', \"labeled_final\", split='train', download_mode=\"reuse_cache_if_exists\")\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 748, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 658, in _download_and_prepare\r\n + str(e)\r\nOSError: Cannot find data file.\r\nOriginal error:\r\n[Errno 2] Failed to open local file '\/gpfswork\/rech\/toto\/datasets\/paws\/labeled_final\/1.1.0\/09d8fae989bb569009a8f5b879ccf2924d3e5cd55bfe2e89e6dab1c0b50ecd34.incomplete\/paws-test.arrow'. 
Detail: [errno 2] No such file or directory\r\n\r\n## Environment info\r\n\r\n- `datasets` version: datasets==1.8.0\r\n- Platform: linux (jeanzay)\r\n- Python version: 3.7.8\r\n- PyArrow version: pyarrow==2.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2727","id":955812149,"node_id":"MDU6SXNzdWU5NTU4MTIxNDk=","number":2727,"title":"Error in loading the Arabic Billion Words Corpus","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-07-29T12:53:09Z","updated_at":"2021-07-30T13:03:55Z","closed_at":"2021-07-30T13:03:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI get `IndexError: list index out of range` when trying to load the `Techreen` and `Almustaqbal` configs of the dataset.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nload_dataset(\"arabic_billion_words\", \"Techreen\")\r\nload_dataset(\"arabic_billion_words\", \"Almustaqbal\")\r\n```\r\n\r\n## Expected results\r\nThe datasets load successfully.\r\n\r\n## Actual results\r\n```python\r\n_extract_tags(self, sample, tag)\r\n 139 if len(out) > 0:\r\n 140 break\r\n--> 141 return out[0]\r\n 142 \r\n 143 def _clean_text(self, text):\r\n\r\nIndexError: list index out of range\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.10.2\r\n- Platform: Ubuntu 18.04.5 LTS\r\n- Python version: 3.7.11\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726","id":955674388,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk5Mzg5MDk1","number":2726,"title":"Typo fix `tokenize_exemple`","user":{"login":"shabie","id":30535146,"node_id":"MDQ6VXNlcjMwNTM1MTQ2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30535146?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shabie","html_url":"https:\/\/github.com\/shabie","followers_url":"https:\/\/api.github.com\/users\/shabie\/followers","following_url":"https:\/\/api.github.com\/users\/shabie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shabie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shabie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shabie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shabie\/orgs","repos_url":"https:\/\/api.github.com\/users\/shabie\/repos","events_url":"https:\/\/api.github.com\/users\/shabie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shabie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-29T10:03:37Z","updated_at":"2021-07-29T12:00:25Z","closed_at":"2021-07-29T12:00:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2726","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726.patch","merged_at":"2021-07-29T12:00:25Z"},"body":"There is a small typo in the main README.md","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725","id":955020776,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk4ODMwNjYw","number":2725,"title":"Pass use_auth_token to 
request_etags","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-28T16:13:29Z","updated_at":"2021-07-28T16:38:02Z","closed_at":"2021-07-28T16:38:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2725","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725.patch","merged_at":"2021-07-28T16:38:01Z"},"body":"Fix #2724.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2724","id":954919607,"node_id":"MDU6SXNzdWU5NTQ5MTk2MDc=","number":2724,"title":"404 Error when loading remote data files from private 
repo","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-07-28T14:24:23Z","updated_at":"2021-07-29T04:58:49Z","cl
osed_at":"2021-07-28T16:38:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen loading remote data files from a private repo, a 404 error is raised.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nurl = hf_hub_url(\"lewtun\/asr-preds-test\", \"preds.jsonl\", repo_type=\"dataset\")\r\ndset = load_dataset(\"json\", data_files=url, use_auth_token=True)\r\n# HTTPError: 404 Client Error: Not Found for url: https:\/\/huggingface.co\/datasets\/lewtun\/asr-preds-test\/resolve\/main\/preds.jsonl\r\n```\r\n\r\n## Expected results\r\nLoad dataset.\r\n\r\n## Actual results\r\n404 Error.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723","id":954864104,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk4Njk0NDMw","number":2723,"title":"Fix en subset by modifying dataset_info with correct validation infos","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-28T13:36:19Z","updated_at":"2021-07-28T15:22:23Z","closed_at":"2021-07-28T15:22:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2723","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723.patch","merged_at":"2021-07-28T15:22:23Z"},"body":"- Related to: #2682 \r\n\r\nWe correct the values of `en` subset concerning the expected validation values (both `num_bytes` and `num_examples`.\r\n\r\nInstead of having:\r\n\r\n`{\"name\": \"validation\", \"num_bytes\": 828589180707, \"num_examples\": 364868892, 
\"dataset_name\": \"c4\"}`\r\n\r\nWe replace with correct values:\r\n\r\n`{\"name\": \"validation\", \"num_bytes\": 825767266, \"num_examples\": 364608, \"dataset_name\": \"c4\"}`\r\n\r\nThere are still issues with validation with other subsets, but I can't download all the files, unzip to check for the correct number of bytes. (If you have a fast way to obtain those values for other subsets, I can do this in this PR ... otherwise I can't spend those resources)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2722","id":954446053,"node_id":"MDU6SXNzdWU5NTQ0NDYwNTM=","number":2722,"title":"Missing cache file","user":{"login":"PosoSAgapo","id":33200481,"node_id":"MDQ6VXNlcjMzMjAwNDgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33200481?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PosoSAgapo","html_url":"https:\/\/github.com\/PosoSAgapo","followers_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/followers","following_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/repos","events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-28T03:52:07Z","updated_at":"2021-07-28T09:07:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Strangely missing cache file after I restart my program again.\r\n\r\n`glue_dataset = datasets.load_dataset('glue', 'sst2')`\r\n\r\n`FileNotFoundError: [Errno 2] No such file or directory: 
\/Users\/chris\/.cache\/huggingface\/datasets\/glue\/sst2\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96d6053ad\/dataset_info.json'`\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721","id":954238230,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk4MTY0Njg3","number":2721,"title":"Deal with the bad check in test_load.py","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-27T20:23:23Z","updated_at":"2021-07-28T09:58:34Z","closed_at":"2021-07-28T08:53:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2721","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721.patch","merged_at":"2021-07-28T08:53:18Z"},"body":"This PR removes a check that's been added in #2684. My intention with this check was to capture an URL in the error message, but instead, it captures a substring of the previous regex match in the test function. 
Another option would be to replace this check with:\r\n```python\r\nm_paths = re.findall(r\"\\S*_dummy\/_dummy.py\\b\", str(exc_info.value)) # on Linux this will match an URL as well as a local_path due to different os.sep, so take the last element (an URL always comes last in the list)\r\nassert len(m_paths) > 0 and is_remote_url(m_paths[-1]) # is_remote_url comes from datasets.utils.file_utils\r\n```\r\n\r\n@lhoestq Let me know which one of these two approaches (delete or replace) do you prefer?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720","id":954024426,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk3OTgxNjMx","number":2720,"title":"fix: \ud83d\udc1b fix two typos","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-27T15:50:17Z","updated_at":"2021-07-27T18:38:17Z","closed_at":"2021-07-27T18:38:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2720","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720.patch","merged_at":"2021-07-27T18:38:16Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720\/timeline","performed_via_github_app":null} 
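To make the alternative check proposed in #2721 above concrete, here is a small illustration with a made-up error message and a simplified stand-in for `is_remote_url`; instead of relying on the URL being the last match, it simply filters the matches for a remote one:

```python
# Illustration only: the error message is made up and is_remote_url is a
# simplified stand-in for datasets.utils.file_utils.is_remote_url.
import re
from urllib.parse import urlparse


def is_remote_url(path: str) -> bool:
    return urlparse(path).scheme in ("http", "https")


error_message = (
    "Couldn't find file at /tmp/_dummy/_dummy.py or "
    "https://huggingface.co/datasets/_dummy/_dummy.py"
)
m_paths = re.findall(r"\S*_dummy/_dummy\.py\b", error_message)
remote_matches = [p for p in m_paths if is_remote_url(p)]
assert remote_matches, "the error message should contain at least one remote URL"
print(remote_matches)
```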
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2719","id":953932416,"node_id":"MDU6SXNzdWU5NTM5MzI0MTY=","number":2719,"title":"Use ETag in streaming mode to detect resource updates","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":3470211881,"node_id":"LA_kwDODunzps7O1zsp","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset-viewer","name":"dataset-viewer","color":"E5583E","default":false,"description":"Related to the dataset viewer on huggingface.co"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-27T14:17:09Z","updated_at":"2021-10-22T09:36:08Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\nI want to cache data I generate from processing a dataset I've loaded in streaming mode, but I've currently no way to know if the remote data has been updated or not, thus I don't know when to invalidate my cache.\r\n\r\n**Describe the solution you'd like**\r\n\r\nTake the ETag of the data files into account and provide it (directly or through a hash) to give a signal that I can invalidate my cache.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nNone\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718","id":953360663,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk3NDE0NTQy","number":2718,"title":"New documentation structure","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-07-26T23:15:13Z","updated_at":"2021-09-13T17:20:53Z","closed_at":"2021-09-13T17:20:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2718","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718.patch","merged_at":"2021-09-13T17:20:52Z"},"body":"Organize Datasets documentation into four documentation types to improve clarity and discoverability of content.\r\n\r\n**Content to add in the very short term (feel free to add anything I'm missing):**\r\n- A discussion on why Datasets uses Arrow that includes some context and background about why we use Arrow. Would also be great to talk about Datasets speed and performance here, and if you can share any benchmarking\/tests you did, that would be awesome! 
Finally, a discussion about how memory-mapping frees the user from RAM constraints would be very helpful.\r\n- Explain why you would want to disable or override verifications when loading a dataset.\r\n- If possible, include a code sample of when the number of elements in the field of an output dictionary aren\u2019t the same as the other fields in the output dictionary (taken from the [note](https:\/\/huggingface.co\/docs\/datasets\/processing.html#augmenting-the-dataset) here).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717","id":952979976,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk3MDkzNDEx","number":2717,"title":"Fix shuffle on IterableDataset that disables batching in case any functions were mapped","user":{"login":"amankhandelia","id":7098967,"node_id":"MDQ6VXNlcjcwOTg5Njc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7098967?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/amankhandelia","html_url":"https:\/\/github.com\/amankhandelia","followers_url":"https:\/\/api.github.com\/users\/amankhandelia\/followers","following_url":"https:\/\/api.github.com\/users\/amankhandelia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/amankhandelia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/amankhandelia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/amankhandelia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/amankhandelia\/orgs","repos_url":"https:\/\/api.github.com\/users\/amankhandelia\/repos","events_url":"https:\/\/api.github.com\/users\/amankhandelia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/amankhandelia\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-26T14:42:22Z","updated_at":"2021-07-26T18:04:14Z","closed_at":"2021-07-26T16:30:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2717","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717.patch","merged_at":"2021-07-26T16:30:05Z"},"body":"Made a very minor change to fix the issue#2716. 
Added the missing argument in the constructor call.\r\n\r\nAs discussed in the bug report, the change is made to prevent the `shuffle` method call from resetting the value of `batched` attribute in `MappedExamplesIterable`\r\n\r\nFix #2716.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2716","id":952902778,"node_id":"MDU6SXNzdWU5NTI5MDI3Nzg=","number":2716,"title":"Calling shuffle on IterableDataset will disable batching in case any functions were mapped","user":{"login":"amankhandelia","id":7098967,"node_id":"MDQ6VXNlcjcwOTg5Njc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7098967?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/amankhandelia","html_url":"https:\/\/github.com\/amankhandelia","followers_url":"https:\/\/api.github.com\/users\/amankhandelia\/followers","following_url":"https:\/\/api.github.com\/users\/amankhandelia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/amankhandelia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/amankhandelia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/amankhandelia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/amankhandelia\/orgs","repos_url":"https:\/\/api.github.com\/users\/amankhandelia\/repos","events_url":"https:\/\/api.github.com\/users\/amankhandelia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/amankhandelia\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-26T13:24:59Z","updated_at":"2021-07-26T18:04:43Z","closed_at":"2021-07-26T18:04:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When using dataset in streaming mode, if one applies `shuffle` method on the dataset and `map` method for which `batched=True` than the batching operation will not happen, instead `batched` will be set to `False`\r\n\r\nI did RCA on the dataset codebase, the problem is emerging from [this line of code](https:\/\/github.com\/huggingface\/datasets\/blob\/d25a0bf94d9f9a9aa6cabdf5b450b9c327d19729\/src\/datasets\/iterable_dataset.py#L197) here as it is\r\n`self.ex_iterable.shuffle_data_sources(seed), function=self.function, batch_size=self.batch_size`, as one can see it is missing batched argument, which means that the iterator fallsback to default constructor value, which in this case is `False`.\r\nTo remedy the problem we can change this 
line to\r\n`self.ex_iterable.shuffle_data_sources(seed), function=self.function, batched=self.batched, batch_size=self.batch_size`\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715","id":952845229,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk2OTc5MjQ1","number":2715,"title":"Update PAN-X data URL in XTREME dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-26T12:21:17Z","updated_at":"2021-07-26T13:27:59Z","closed_at":"2021-07-26T13:27:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2715","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715.patch","merged_at":"2021-07-26T13:27:59Z"},"body":"Related to #2710, #2691.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715\/timeline","performed_via_github_app":null} 
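For the batching bug described in #2716 above, a minimal reproduction sketch (the dataset name and mapped function are illustrative only, and the exact `shuffle` signature may differ between `datasets` versions):

```python
from datasets import load_dataset

# any streaming dataset with a "text" column works as an illustration
ds = load_dataset("mc4", "iw", split="train", streaming=True)

# batched map: the function receives a dict of lists and returns one
ds = ds.map(lambda batch: {"n_chars": [len(t) for t in batch["text"]]}, batched=True)

# before the fix, shuffle() rebuilt MappedExamplesIterable without forwarding batched=True,
# so the mapped function was silently applied to single examples again
ds = ds.shuffle(seed=42)

print(next(iter(ds)))
```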
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2714","id":952580820,"node_id":"MDU6SXNzdWU5NTI1ODA4MjA=","number":2714,"title":"add more precise information for size","user":{"login":"pennyl67","id":1493902,"node_id":"MDQ6VXNlcjE0OTM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1493902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pennyl67","html_url":"https:\/\/github.com\/pennyl67","followers_url":"https:\/\/api.github.com\/users\/pennyl67\/followers","following_url":"https:\/\/api.github.com\/users\/pennyl67\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pennyl67\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pennyl67\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pennyl67\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pennyl67\/orgs","repos_url":"https:\/\/api.github.com\/users\/pennyl67\/repos","events_url":"https:\/\/api.github.com\/users\/pennyl67\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pennyl67\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-26T07:11:03Z","updated_at":"2021-07-26T09:16:25Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"For the import into ELG, we would like a more precise description of the size of the dataset, instead of the current size categories. The size can be expressed in bytes, or any other preferred size unit. 
As suggested in the slack channel, perhaps this could be computed with a regex for existing datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713","id":952515256,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk2Njk3MzU0","number":2713,"title":"Enumerate all ner_tags values in WNUT 17 dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-26T05:22:16Z","updated_at":"2021-07-26T09:30:55Z","closed_at":"2021-07-26T09:30:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2713","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713.patch","merged_at":"2021-07-26T09:30:54Z"},"body":"This PR does:\r\n- Enumerate all ner_tags in dataset card Data Fields section\r\n- Add all metadata tags to dataset card\r\n\r\nClose #2709.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713\/timeline","performed_via_github_app":null} 
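As a complement to #2713 above: the full tag set being documented in the dataset card can also be read programmatically from the dataset features, which is a quick way to check that the card matches the data. A short sketch:

```python
from datasets import load_dataset

ds = load_dataset("wnut_17", split="train")

# ner_tags is a Sequence of ClassLabel; .feature.names lists all 13 labels in order,
# i.e. "O" plus a B-/I- pair (beginning / inside of an entity span) for each of the
# six entity types defined in the WNUT 17 paper
print(ds.features["ner_tags"].feature.names)
```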
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710","id":951723326,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk2MDYyNjAy","number":2710,"title":"Update WikiANN data URL","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-23T16:29:21Z","updated_at":"2021-07-26T09:34:23Z","closed_at":"2021-07-26T09:34:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2710","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710.patch","merged_at":"2021-07-26T09:34:22Z"},"body":"WikiANN data source URL is no longer accessible: 404 error from Dropbox.\r\n\r\nWe have decided to host it at Hugging Face. 
This PR updates the data source URL, the metadata JSON file and the dataset card.\r\n\r\nClose #2691.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2709","id":951534757,"node_id":"MDU6SXNzdWU5NTE1MzQ3NTc=","number":2709,"title":"Missing documentation for wnut_17 (ner_tags)","user":{"login":"maxpel","id":31095360,"node_id":"MDQ6VXNlcjMxMDk1MzYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31095360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxpel","html_url":"https:\/\/github.com\/maxpel","followers_url":"https:\/\/api.github.com\/users\/maxpel\/followers","following_url":"https:\/\/api.github.com\/users\/maxpel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxpel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxpel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxpel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxpel\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxpel\/repos","events_url":"https:\/\/api.github.com\/users\/maxpel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxpel\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-23T12:25:32Z","updated_at":"2021-07-26T09:30:55Z","closed_at":"2021-07-26T09:30:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"On the info page of the wnut_17 data set (https:\/\/huggingface.co\/datasets\/wnut_17), the model output of ner-tags is only documented for these 5 cases:\r\n\r\n`ner_tags: a list of classification labels, with possible values including O (0), B-corporation (1), I-corporation (2), B-creative-work (3), I-creative-work (4).`\r\n\r\nI trained a model with the data and it gives me 13 classes:\r\n\r\n```\r\n\"id2label\": {\r\n \"0\": 0,\r\n \"1\": 1,\r\n \"2\": 2,\r\n \"3\": 3,\r\n \"4\": 4,\r\n \"5\": 5,\r\n \"6\": 6,\r\n \"7\": 7,\r\n \"8\": 8,\r\n \"9\": 9,\r\n \"10\": 10,\r\n \"11\": 11,\r\n \"12\": 12\r\n }\r\n\r\n \"label2id\": {\r\n \"0\": 0,\r\n \"1\": 1,\r\n \"10\": 10,\r\n \"11\": 11,\r\n \"12\": 12,\r\n \"2\": 2,\r\n \"3\": 3,\r\n \"4\": 4,\r\n \"5\": 5,\r\n \"6\": 6,\r\n \"7\": 7,\r\n \"8\": 8,\r\n \"9\": 9\r\n }\r\n```\r\nThe paper (https:\/\/www.aclweb.org\/anthology\/W17-4418.pdf) explains those 6 categories, but the ordering does not match:\r\n\r\n```\r\n1. person\r\n2. location (including GPE, facility)\r\n3. corporation\r\n4. product (tangible goods, or well-defined\r\nservices)\r\n5. 
creative-work (song, movie, book and\r\nso on)\r\n6. group (subsuming music band, sports team,\r\nand non-corporate organisations)\r\n```\r\nI would be very helpful for me, if somebody could clarify the model ouputs and explain the \"B-\" and \"I-\" prefixes to me.\r\n\r\nReally great work with that and the other packages, I couldn't believe that training the model with that data was basically a one-liner!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2708","id":951092660,"node_id":"MDU6SXNzdWU5NTEwOTI2NjA=","number":2708,"title":"QASC: incomplete training set ","user":{"login":"danyaljj","id":2441454,"node_id":"MDQ6VXNlcjI0NDE0NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2441454?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danyaljj","html_url":"https:\/\/github.com\/danyaljj","followers_url":"https:\/\/api.github.com\/users\/danyaljj\/followers","following_url":"https:\/\/api.github.com\/users\/danyaljj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danyaljj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danyaljj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danyaljj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danyaljj\/orgs","repos_url":"https:\/\/api.github.com\/users\/danyaljj\/repos","events_url":"https:\/\/api.github.com\/users\/danyaljj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danyaljj\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-22T21:59:44Z","updated_at":"2021-07-23T13:30:07Z","closed_at":"2021-07-23T13:30:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe training instances are not loaded properly. 
\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"qasc\", script_version='1.10.2')\r\n \r\ndef load_instances(split): \r\n instances = dataset[split]\r\n print(f\"split: {split} - size: {len(instances)}\")\r\n for x in instances:\r\n print(json.dumps(x))\r\n\r\n\r\nload_instances('test')\r\nload_instances('validation')\r\nload_instances('train')\r\n```\r\n\r\n## results\r\nFor test and validation, we can see the examples in the output (which is good!): \r\n```\r\nsplit: test - size: 920\r\n{\"answerKey\": \"\", \"choices\": {\"label\": [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\"], \"text\": [\"Anthax\", \"under water\", \"uterus\", \"wombs\", \"two\", \"moles\", \"live\", \"embryo\"]}, \"combinedfact\": \"\", \"fact1\": \"\", \"fact2\": \"\", \"formatted_question\": \"What type of birth do therian mammals have? (A) Anthax (B) under water (C) uterus (D) wombs (E) two (F) moles (G) live (H) embryo\", \"id\": \"3C44YUNSI1OBFBB8D36GODNOZN9DPA\", \"question\": \"What type of birth do therian mammals have?\"}\r\n{\"answerKey\": \"\", \"choices\": {\"label\": [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\"], \"text\": [\"Corvidae\", \"arthropods\", \"birds\", \"backbones\", \"keratin\", \"Jurassic\", \"front paws\", \"Parakeets.\"]}, \"combinedfact\": \"\", \"fact1\": \"\", \"fact2\": \"\", \"formatted_question\": \"By what time had mouse-sized viviparous mammals evolved? (A) Corvidae (B) arthropods (C) birds (D) backbones (E) keratin (F) Jurassic (G) front paws (H) Parakeets.\", \"id\": \"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ\", \"question\": \"By what time had mouse-sized viviparous mammals evolved?\"}\r\n{\"answerKey\": \"\", \"choices\": {\"label\": [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\"], \"text\": [\"Reduced friction\", \"causes infection\", \"vital to a good life\", \"prevents water loss\", \"camouflage from consumers\", \"Protection against predators\", \"spur the growth of the plant\", \"a smooth surface\"]}, \"combinedfact\": \"\", \"fact1\": \"\", \"fact2\": \"\", \"formatted_question\": \"What does a plant's skin do? (A) Reduced friction (B) causes infection (C) vital to a good life (D) prevents water loss (E) camouflage from consumers (F) Protection against predators (G) spur the growth of the plant (H) a smooth surface\", \"id\": \"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9\", \"question\": \"What does a plant's skin do?\"}\r\n...\r\n```\r\nHowever, only a few instances are loaded for the training split, which is not correct. 
\r\n\r\n## Environment info\r\n- `datasets` version: '1.10.2' \r\n- Platform: MaxOS \r\n- Python version:3.7\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2707","id":950812945,"node_id":"MDU6SXNzdWU5NTA4MTI5NDU=","number":2707,"title":"404 Not Found Error when loading LAMA dataset","user":{"login":"dwil2444","id":26467159,"node_id":"MDQ6VXNlcjI2NDY3MTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26467159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dwil2444","html_url":"https:\/\/github.com\/dwil2444","followers_url":"https:\/\/api.github.com\/users\/dwil2444\/followers","following_url":"https:\/\/api.github.com\/users\/dwil2444\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dwil2444\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dwil2444\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dwil2444\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dwil2444\/orgs","repos_url":"https:\/\/api.github.com\/users\/dwil2444\/repos","events_url":"https:\/\/api.github.com\/users\/dwil2444\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dwil2444\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-22T15:52:33Z","updated_at":"2021-07-26T14:29:07Z","closed_at":"2021-07-26T14:29:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The [LAMA](https:\/\/huggingface.co\/datasets\/viewer\/?dataset=lama) probing dataset is not available for download: \r\n\r\nSteps to Reproduce: \r\n\r\n1. `from datasets import load_dataset`\r\n2. `dataset = load_dataset('lama', 'trex')`. 
\r\n\r\n\r\nResults: \r\n`FileNotFoundError: Couldn't find file locally at lama\/lama.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.2\/datasets\/lama\/lama.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/lama\/lama.py`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706","id":950606561,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk1MTI3ODgz","number":2706,"title":"Update BibTeX entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T12:29:29Z","updated_at":"2021-07-22T12:43:00Z","closed_at":"2021-07-22T12:43:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2706","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706.patch","merged_at":"2021-07-22T12:43:00Z"},"body":"Update BibTeX entry.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2705","id":950488583,"node_id":"MDU6SXNzdWU5NTA0ODg1ODM=","number":2705,"title":"404 not found error on loading WIKIANN dataset","user":{"login":"ronbutan","id":39296659,"node_id":"MDQ6VXNlcjM5Mjk2NjU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39296659?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ronbutan","html_url":"https:\/\/github.com\/ronbutan","followers_url":"https:\/\/api.github.com\/users\/ronbutan\/followers","following_url":"https:\/\/api.github.com\/users\/ronbutan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ronbutan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ronbutan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ronbutan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ronbutan\/orgs","repos_url":"https:\/\/api.github.com\/users\/ronbutan\/repos","events_url":"https:\/\/api.github.com\/users\/ronbutan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ronbutan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-22T09:55:50Z","updated_at":"2021-07-23T08:07:32Z","closed_at":"2021-07-23T08:07:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nUnable to retreive wikiann English dataset\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import list_datasets, load_dataset, list_metrics, load_metric\r\nWIKIANN = load_dataset(\"wikiann\",\"en\")\r\n```\r\n\r\n## Expected results\r\nColab notebook should display successful download status\r\n\r\n## Actual results\r\nFileNotFoundError: Couldn't find file at https:\/\/www.dropbox.com\/s\/12h3qqog6q4bjve\/panx_dataset.tar?dl=1\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.10.1\r\n- Platform: Linux-5.4.104+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.11\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704","id":950483980,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk1MDIzMTEz","number":2704,"title":"Fix pick default config name message","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T09:49:43Z","updated_at":"2021-07-22T10:02:41Z","closed_at":"2021-07-22T10:02:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2704","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704.patch","merged_at":"2021-07-22T10:02:40Z"},"body":"The error message to tell which config name to load is not displayed. \r\n\r\nThis is because in the code it was considering the config kwargs to be non-empty, which is a special case for custom configs created on the fly. 
It appears after this change: https:\/\/github.com\/huggingface\/datasets\/pull\/2659\r\n\r\nI fixed that by making the config kwargs empty by default, even if default parameters are passed\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2703","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2703","id":950482284,"node_id":"MDU6SXNzdWU5NTA0ODIyODQ=","number":2703,"title":"Bad message when config name is missing","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github
.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-07-22T09:47:23Z","updated_at":"2021-07-22T10:02:40Z","closed_at":"2021-07-22T10:02:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When loading a dataset that have several configurations, we expect to see an error message if the user doesn't specify a config name.\r\n\r\nHowever in `datasets` 1.10.0 and 1.10.1 it doesn't show the right message:\r\n\r\n```python\r\nimport datasets\r\n\r\ndatasets.load_dataset(\"glue\")\r\n```\r\nraises\r\n```python\r\nAttributeError: 'BuilderConfig' object has no attribute 'text_features'\r\n```\r\ninstead of\r\n```python\r\nValueError: Config name is missing.\r\nPlease pick one among the available configs: ['cola', 'sst2', 'mrpc', 'qqp', 'stsb', 'mnli', 'mnli_mismatched', 'mnli_matched', 'qnli', 'rte', 'wnli', 'ax']\r\nExample of usage:\r\n `load_dataset('glue', 'cola')`\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702","id":950448159,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0OTkyOTc1","number":2702,"title":"Update BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T09:04:39Z","updated_at":"2021-07-22T09:17:39Z","closed_at":"2021-07-22T09:17:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2702","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702.patch","merged_at":"2021-07-22T09:17:38Z"},"body":"Update BibTeX entry.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701","id":950422403,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0OTcxMzM3","number":2701,"title":"Fix download_mode 
docstrings","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T08:30:25Z","updated_at":"2021-07-22T09:33:31Z","closed_at":"2021-07-22T09:33:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2701","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701.patch","merged_at":"2021-07-22T09:33:31Z"},"body":"Fix `download_mode` docstrings.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2700","id":950276325,"node_id":"MDU6SXNzdWU5NTAyNzYzMjU=","number":2700,"title":"from datasets import Dataset is failing 
","user":{"login":"kswamy15","id":5582286,"node_id":"MDQ6VXNlcjU1ODIyODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5582286?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kswamy15","html_url":"https:\/\/github.com\/kswamy15","followers_url":"https:\/\/api.github.com\/users\/kswamy15\/followers","following_url":"https:\/\/api.github.com\/users\/kswamy15\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kswamy15\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kswamy15\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kswamy15\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kswamy15\/orgs","repos_url":"https:\/\/api.github.com\/users\/kswamy15\/repos","events_url":"https:\/\/api.github.com\/users\/kswamy15\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kswamy15\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-22T03:51:23Z","updated_at":"2021-07-22T07:23:45Z","closed_at":"2021-07-22T07:09:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nfrom datasets import Dataset\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in ()\r\n 25 import posixpath\r\n 26 import requests\r\n---> 27 from tqdm.contrib.concurrent import thread_map\r\n 28 \r\n 29 from .. 
import __version__, config, utils\r\n\r\nModuleNotFoundError: No module named 'tqdm.contrib.concurrent'\r\n\r\n---------------------------------------------------------------------------\r\nNOTE: If your import is failing due to a missing package, you can\r\nmanually install dependencies using either !pip or !apt.\r\n\r\nTo view examples of installing some common dependencies, click the\r\n\"Open Examples\" button below.\r\n---------------------------------------------------------------------------\r\n\r\n## Environment info\r\n\r\n- `datasets` version: latest version as of 07\/21\/2021\r\n- Platform: Google Colab\r\n- Python version: 3.7\r\n- PyArrow version:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2699","id":950221226,"node_id":"MDU6SXNzdWU5NTAyMjEyMjY=","number":2699,"title":"cannot combine splits merging and streaming?","user":{"login":"eyaler","id":4436747,"node_id":"MDQ6VXNlcjQ0MzY3NDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4436747?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eyaler","html_url":"https:\/\/github.com\/eyaler","followers_url":"https:\/\/api.github.com\/users\/eyaler\/followers","following_url":"https:\/\/api.github.com\/users\/eyaler\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eyaler\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eyaler\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eyaler\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eyaler\/orgs","repos_url":"https:\/\/api.github.com\/users\/eyaler\/repos","events_url":"https:\/\/api.github.com\/users\/eyaler\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eyaler\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-22T01:13:25Z","updated_at":"2021-07-22T08:27:47Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"this does not work:\r\n`dataset = datasets.load_dataset('mc4','iw',split='train+validation',streaming=True)`\r\nwith error:\r\n`ValueError: Bad split: train+validation. 
Available splits: ['train', 'validation']`\r\n\r\nthese work:\r\n`dataset = datasets.load_dataset('mc4','iw',split='train+validation')`\r\n`dataset = datasets.load_dataset('mc4','iw',split='train',streaming=True)`\r\n`dataset = datasets.load_dataset('mc4','iw',split='validation',streaming=True)`\r\n\r\ni could not find a reference to this in the documentation and the error message is confusing. also would be nice to allow streaming for the merged splits","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698","id":950159867,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NzUxMzMw","number":2698,"title":"Ignore empty batch when writing","user":{"login":"pcuenca","id":1177582,"node_id":"MDQ6VXNlcjExNzc1ODI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1177582?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pcuenca","html_url":"https:\/\/github.com\/pcuenca","followers_url":"https:\/\/api.github.com\/users\/pcuenca\/followers","following_url":"https:\/\/api.github.com\/users\/pcuenca\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pcuenca\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pcuenca\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pcuenca\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pcuenca\/orgs","repos_url":"https:\/\/api.github.com\/users\/pcuenca\/repos","events_url":"https:\/\/api.github.com\/users\/pcuenca\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pcuenca\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T22:35:30Z","updated_at":"2021-07-26T14:56:03Z","closed_at":"2021-07-26T13:25:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2698","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698.patch","merged_at":"2021-07-26T13:25:26Z"},"body":"This prevents an schema update with unknown column types, as reported in #2644.\r\n\r\nThis is my first attempt at fixing the issue. I tested the following:\r\n- First batch returned by a batched map operation is empty.\r\n- An intermediate batch is empty.\r\n- `python -m unittest tests.test_arrow_writer` passes.\r\n\r\nHowever, `arrow_writer` looks like a pretty generic interface, I'm not sure if there are other uses I may have overlooked. 
Let me know if that's the case, or if a better approach would be preferable.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697","id":950021623,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NjMyODg0","number":2697,"title":"Fix import on Colab","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-21T19:03:38Z","updated_at":"2021-07-22T07:09:08Z","closed_at":"2021-07-22T07:09:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2697","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697.patch","merged_at":"2021-07-22T07:09:06Z"},"body":"Fix #2695, fix #2700. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696","id":949901726,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NTMwODg3","number":2696,"title":"Add support for disable_progress_bar on Windows","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-21T16:34:53Z","updated_at":"2021-07-26T13:31:14Z","closed_at":"2021-07-26T09:38:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2696","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696.patch","merged_at":"2021-07-26T09:38:37Z"},"body":"This PR is a continuation of #2667 and adds support for `utils.disable_progress_bar()` on Windows when using multiprocessing. 
This [answer](https:\/\/stackoverflow.com\/a\/6596695\/14095927) on SO explains it nicely why the current approach (with calling `utils.is_progress_bar_enabled()` inside `Dataset._map_single`) would not work on Windows.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2695","id":949864823,"node_id":"MDU6SXNzdWU5NDk4NjQ4MjM=","number":2695,"title":"Cannot import load_dataset on Colab","user":{"login":"bayartsogt-ya","id":43239645,"node_id":"MDQ6VXNlcjQzMjM5NjQ1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43239645?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bayartsogt-ya","html_url":"https:\/\/github.com\/bayartsogt-ya","followers_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/followers","following_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/orgs","repos_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/repos","events_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-07-21T15:52:51Z","updated_at":"2021-07-22T07:26:25Z","closed_at":"2021-07-22T07:09:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nGot tqdm concurrent module not found error during importing load_dataset from datasets.\r\n\r\n## Steps to reproduce the bug\r\nHere [colab notebook](https:\/\/colab.research.google.com\/drive\/1pErWWnVP4P4mVHjSFUtkePd8Na_Qirg4?usp=sharing) to reproduce the error\r\n\r\nOn colab:\r\n```python\r\n!pip install datasets\r\nfrom datasets import load_dataset\r\n```\r\n\r\n## Expected results\r\nWorks without error\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n```\r\nModuleNotFoundError Traceback (most recent call last)\r\n in ()\r\n----> 1 from datasets import load_dataset, load_metric, Metric, MetricInfo, Features, Value\r\n 2 from sklearn.metrics import mean_squared_error\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/__init__.py in ()\r\n 31 )\r\n 32 
\r\n---> 33 from .arrow_dataset import Dataset, concatenate_datasets\r\n 34 from .arrow_reader import ArrowReader, ReadInstruction\r\n 35 from .arrow_writer import ArrowWriter\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in ()\r\n 40 from tqdm.auto import tqdm\r\n 41 \r\n---> 42 from datasets.tasks.text_classification import TextClassification\r\n 43 \r\n 44 from . import config, utils\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/tasks\/__init__.py in ()\r\n 1 from typing import Optional\r\n 2 \r\n----> 3 from ..utils.logging import get_logger\r\n 4 from .automatic_speech_recognition import AutomaticSpeechRecognition\r\n 5 from .base import TaskTemplate\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/__init__.py in ()\r\n 19 \r\n 20 from . import logging\r\n---> 21 from .download_manager import DownloadManager, GenerateMode\r\n 22 from .file_utils import DownloadConfig, cached_path, hf_bucket_url, is_remote_url, temp_seed\r\n 23 from .mock_download_manager import MockDownloadManager\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/download_manager.py in ()\r\n 24 \r\n 25 from .. import config\r\n---> 26 from .file_utils import (\r\n 27 DownloadConfig,\r\n 28 cached_path,\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in ()\r\n 25 import posixpath\r\n 26 import requests\r\n---> 27 from tqdm.contrib.concurrent import thread_map\r\n 28 \r\n 29 from .. import __version__, config, utils\r\n\r\nModuleNotFoundError: No module named 'tqdm.contrib.concurrent'\r\n```\r\n## Environment info\r\n\r\n- `datasets` version: 1.10.0\r\n- Platform: Colab\r\n- Python version: 3.7.11\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694","id":949844722,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NDg0NTcy","number":2694,"title":"fix: \ud83d\udc1b change string format to allow copy\/paste to work in 
bash","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T15:30:40Z","updated_at":"2021-07-22T10:41:47Z","closed_at":"2021-07-22T10:41:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2694","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694.patch","merged_at":"2021-07-22T10:41:47Z"},"body":"Before: copy\/paste resulted in an error because the square bracket\r\ncharacters `[]` are special characters in bash","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693","id":949797014,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NDQ1ODAz","number":2693,"title":"Fix OSCAR 
Esperanto","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T14:43:50Z","updated_at":"2021-07-21T14:53:52Z","closed_at":"2021-07-21T14:53:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2693","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693.patch","merged_at":"2021-07-21T14:53:51Z"},"body":"The Esperanto part (original) of OSCAR has the wrong number of examples:\r\n```python\r\nfrom datasets import load_dataset\r\nraw_datasets = load_dataset(\"oscar\", \"unshuffled_original_eo\")\r\n```\r\nraises\r\n```python\r\nNonMatchingSplitsSizesError:\r\n[{'expected': SplitInfo(name='train', num_bytes=314188336, num_examples=121171, dataset_name='oscar'),\r\n'recorded': SplitInfo(name='train', num_bytes=314064514, num_examples=121168, dataset_name='oscar')}]\r\n```\r\n\r\nI updated the number of expected examples in dataset_infos.json\r\n\r\ncc @sgugger ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692","id":949765484,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NDE4MDg1","number":2692,"title":"Update BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T14:23:35Z","updated_at":"2021-07-21T15:31:41Z","closed_at":"2021-07-21T15:31:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2692","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692.patch","merged_at":"2021-07-21T15:31:40Z"},"body":"Update BibTeX entry","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2691","id":949758379,"node_id":"MDU6SXNzdWU5NDk3NTgzNzk=","number":2691,"title":"xtreme \/ pan-x cannot be 
downloaded","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-07-21T14:18:05Z","updated_at":"2021-07-26T09:34:22Z","closed_at":"2021-07-26T09:34:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nDataset xtreme \/ pan-x cannot be loaded\r\n\r\nSeems related to https:\/\/github.com\/huggingface\/datasets\/pull\/2326\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\ndataset = load_dataset(\"xtreme\", \"PAN-X.fr\")\r\n```\r\n\r\n## Expected results\r\n\r\nLoad the dataset\r\n\r\n## Actual results\r\n\r\n```\r\nFileNotFoundError: Couldn't find file at https:\/\/www.dropbox.com\/s\/12h3qqog6q4bjve\/panx_dataset.tar?dl=1\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: macOS-11.4-x86_64-i386-64bit\r\n- Python version: 3.8.11\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690","id":949574500,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0MjU5MDc1","number":2690,"title":"Docs 
details","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-21T10:43:14Z","updated_at":"2021-07-27T18:40:54Z","closed_at":"2021-07-27T18:40:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2690","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690.patch","merged_at":"2021-07-27T18:40:53Z"},"body":"Some comments here:\r\n\r\n- the code samples assume the expected libraries have already been installed. Maybe add a section at start, or add it to every code sample. Something like `pip install datasets transformers torch 'datasets[streaming]'` (maybe just link to https:\/\/huggingface.co\/docs\/datasets\/installation.html + a one-liner that installs all the requirements \/ alternatively a requirements.txt file)\r\n- \"If you\u2019d like to play with the examples, you must install it from source.\" in https:\/\/huggingface.co\/docs\/datasets\/installation.html: it's not clear to me what this means (what are these \"examples\"?)\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html: \"or AWS bucket if it\u2019s not already stored in the library\". It's the only place in the doc (aside from the docstring https:\/\/huggingface.co\/docs\/datasets\/package_reference\/loading_methods.html?highlight=aws bucket#datasets.list_datasets) where the \"AWS bucket\" is mentioned. It's not easy to understand what this means. Maybe explain more, and link to https:\/\/s3.amazonaws.com\/datasets.huggingface.co and\/or https:\/\/huggingface.co\/docs\/datasets\/filesystems.html.\r\n- example in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#manually-downloading-files is obsoleted by https:\/\/github.com\/huggingface\/datasets\/pull\/2326. 
Also: see https:\/\/github.com\/huggingface\/datasets\/issues\/2691 for a bug on this specific dataset.\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#manually-downloading-files the doc says \"After you\u2019ve downloaded the files, you can point to the folder hosting them locally with the data_dir argument as follows:\", but the following example does not show how to use `data_dir`\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#csv-files, it would be nice to have an URL to the csv loader reference (but I'm not sure there is one in the API reference). This comment applies in many places in the doc: I would want the API reference to contain doc for all the code\/functions\/classes... and I would want a lot more links inside the doc pointing to the API entries.\r\n- in the API reference (docstrings) I would prefer \"SOURCE\" to link to github instead of a copy of the code inside the docs site (eg. https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/load.py#L711 instead of https:\/\/huggingface.co\/docs\/datasets\/_modules\/datasets\/load.html#load_dataset)\r\n- it seems like not all the API is exposed in the doc. For example, there is no doc for [`disable_progress_bar`](https:\/\/github.com\/huggingface\/datasets\/search?q=disable_progress_bar), see https:\/\/huggingface.co\/docs\/datasets\/search.html?q=disable_progress_bar, even if the code contains docstrings. Does it mean that the function is not officially supported? (otherwise, maybe it also deserves a mention in https:\/\/huggingface.co\/docs\/datasets\/package_reference\/logging_methods.html)\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html?highlight=most%20efficient%20format%20have%20json%20files%20consisting%20multiple%20json%20objects#json-files, \"The most efficient format is to have JSON files consisting of multiple JSON objects, one per line, representing individual data rows:\", maybe link to https:\/\/en.wikipedia.org\/wiki\/JSON_streaming#Line-delimited_JSON and give it a name (\"line-delimited JSON\"? \"JSON Lines\" as in https:\/\/huggingface.co\/docs\/datasets\/processing.html#exporting-a-dataset-to-csv-json-parquet-or-to-python-objects ?)\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html, for the local files sections, it would be nice to provide sample csv \/ json \/ text files to download, so that it's easier for the reader to try to load them (instead: they won't try)\r\n- the doc explains how to shard a dataset, but does not explain why and when a dataset should be sharded (I have no idea... for [parallelizing](https:\/\/huggingface.co\/docs\/datasets\/processing.html#multiprocessing)?). 
It does neither give an idea of the number of shards a dataset typically should have and why.\r\n- the code example in https:\/\/huggingface.co\/docs\/datasets\/processing.html#mapping-in-a-distributed-setting does not work, because `training_args` has not been defined before in the doc.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2689","id":949447104,"node_id":"MDU6SXNzdWU5NDk0NDcxMDQ=","number":2689,"title":"cannot save the dataset to disk after rename_column","user":{"login":"PaulLerner","id":25532159,"node_id":"MDQ6VXNlcjI1NTMyMTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25532159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulLerner","html_url":"https:\/\/github.com\/PaulLerner","followers_url":"https:\/\/api.github.com\/users\/PaulLerner\/followers","following_url":"https:\/\/api.github.com\/users\/PaulLerner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulLerner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulLerner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulLerner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulLerner\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulLerner\/repos","events_url":"https:\/\/api.github.com\/users\/PaulLerner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulLerner\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-21T08:13:40Z","updated_at":"2021-07-21T13:11:04Z","closed_at":"2021-07-21T13:11:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIf you use `rename_column` and do no other modification, you will be unable to save the dataset using `save_to_disk`\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nIn [1]: from datasets import Dataset, load_from_disk\r\nIn [5]: dataset=Dataset.from_dict({'foo': [0]})\r\nIn [7]: dataset.save_to_disk('foo')\r\nIn [8]: dataset=load_from_disk('foo')\r\nIn [10]: dataset=dataset.rename_column('foo', 'bar')\r\nIn [11]: dataset.save_to_disk('foo')\r\n---------------------------------------------------------------------------\r\nPermissionError Traceback (most recent call last)\r\n in \r\n----> 1 
dataset.save_to_disk('foo')\r\n\r\n\/mnt\/beegfs\/projects\/meerqat\/anaconda3\/envs\/meerqat\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in save_to_disk(self, dataset_path\r\n, fs)\r\n 597 if Path(dataset_path, config.DATASET_ARROW_FILENAME) in cache_files_paths:\r\n 598 raise PermissionError(\r\n--> 599 f\"Tried to overwrite {Path(dataset_path, config.DATASET_ARROW_FILENAME)} but a dataset can't overwrite itself.\"\r\n 600 )\r\n 601 if Path(dataset_path, config.DATASET_INDICES_FILENAME) in cache_files_paths:\r\n\r\nPermissionError: Tried to overwrite foo\/dataset.arrow but a dataset can't overwrite itself.\r\n```\r\n\r\nN. B. I created the dataset from dict to enable easy reproduction but the same happens if you load an existing dataset (e.g. starting from `In [8]`)\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-3.10.0-1160.11.1.el7.x86_64-x86_64-with-centos-7.9.2009-Core\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2688","id":949182074,"node_id":"MDU6SXNzdWU5NDkxODIwNzQ=","number":2688,"title":"hebrew language codes he and iw should be treated as aliases","user":{"login":"eyaler","id":4436747,"node_id":"MDQ6VXNlcjQ0MzY3NDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4436747?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eyaler","html_url":"https:\/\/github.com\/eyaler","followers_url":"https:\/\/api.github.com\/users\/eyaler\/followers","following_url":"https:\/\/api.github.com\/users\/eyaler\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eyaler\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eyaler\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eyaler\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eyaler\/orgs","repos_url":"https:\/\/api.github.com\/users\/eyaler\/repos","events_url":"https:\/\/api.github.com\/users\/eyaler\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eyaler\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-20T23:13:52Z","updated_at":"2021-07-21T16:34:53Z","closed_at":"2021-07-21T16:34:53Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"https:\/\/huggingface.co\/datasets\/mc4 not listed when searching for hebrew datasets (he) as it uses the 
older language code iw, preventing discoverability. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687","id":948890481,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNjY1NDI2","number":2687,"title":"Minor documentation fix","user":{"login":"slowwavesleep","id":44175589,"node_id":"MDQ6VXNlcjQ0MTc1NTg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44175589?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slowwavesleep","html_url":"https:\/\/github.com\/slowwavesleep","followers_url":"https:\/\/api.github.com\/users\/slowwavesleep\/followers","following_url":"https:\/\/api.github.com\/users\/slowwavesleep\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slowwavesleep\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slowwavesleep\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slowwavesleep\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slowwavesleep\/orgs","repos_url":"https:\/\/api.github.com\/users\/slowwavesleep\/repos","events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T17:43:23Z","updated_at":"2021-07-21T13:04:55Z","closed_at":"2021-07-21T13:04:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2687","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687.patch","merged_at":"2021-07-21T13:04:55Z"},"body":"Currently, [Writing a dataset loading script](https:\/\/huggingface.co\/docs\/datasets\/add_dataset.html) page has a small error. A link to `matinf` dataset in [_Dataset scripts of reference_](https:\/\/huggingface.co\/docs\/datasets\/add_dataset.html#dataset-scripts-of-reference) section actually leads to `xsquad`, instead. This PR fixes that. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686","id":948811669,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNTk4OTE3","number":2686,"title":"Fix bad config ids that name cache directories","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T16:00:45Z","updated_at":"2021-07-20T16:27:15Z","closed_at":"2021-07-20T16:27:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2686","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686.patch","merged_at":"2021-07-20T16:27:14Z"},"body":"`data_dir=None` was considered a dataset config parameter, hence creating a special config_id for all dataset being loaded.\r\nSince the config_id is used to name the cache directories, this leaded to datasets being regenerated for users.\r\n\r\nI fixed this by ignoring the value of `data_dir` when it's `None` when computing the config_id.\r\nI also added a test to make sure the cache directories are not unexpectedly renamed in the future.\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2683","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2685","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2685\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2685\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2685\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2685","id":948791572,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNTgxNTk2","number":2685,"title":"Fix Blog Authorship Corpus dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-20T15:44:50Z","updated_at":"2021-07-21T13:11:58Z","closed_at":"2021-07-21T13:11:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2685","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2685","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2685.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2685.patch","merged_at":"2021-07-21T13:11:57Z"},"body":"This PR:\r\n- Update the JSON metadata file, which previously was raising a `NonMatchingSplitsSizesError`\r\n- Fix the codec of the data files (`latin_1` instead of `utf-8`), which previously was raising ` UnicodeDecodeError` for some files\r\n\r\nClose #2679.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2685\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2685\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2684","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2684\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2684\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2684\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2684","id":948771753,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNTY0MDY4","number":2684,"title":"Print absolute local paths in load_dataset error 
messages","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T15:28:28Z","updated_at":"2021-07-22T20:48:19Z","closed_at":"2021-07-22T14:01:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2684","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2684","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2684.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2684.patch","merged_at":"2021-07-22T14:01:10Z"},"body":"Use absolute local paths in the error messages of `load_dataset` as per @stas00's suggestion in https:\/\/github.com\/huggingface\/datasets\/pull\/2500#issuecomment-874891223 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2684\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2684\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2683","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2683\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2683\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2683\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2683","id":948721379,"node_id":"MDU6SXNzdWU5NDg3MjEzNzk=","number":2683,"title":"Cache directories changed due to recent changes in how config kwargs are 
handled","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-07-20T14:37:57Z","updated_at":"2021-07-20T16:27:15Z","closed_at":"2021-07-20T16:27:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Since #2659 I can see weird cache directory names with hashes in the config id, even though no additional config kwargs are passed. 
For example:\r\n\r\n```python\r\nfrom datasets import load_dataset_builder\r\n\r\nc4_builder = load_dataset_builder(\"c4\", \"en\")\r\nprint(c4_builder.cache_dir)\r\n# \/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/c4\/en-174d3b7155eb68db\/0.0.0\/...\r\n\r\n# instead of \r\n# \/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/c4\/en\/0.0.0\/...\r\n```\r\nThis issue could be annoying since it would simply ignore old cache directories for users, and regenerate datasets\r\n\r\ncc @stas00 this is what you experienced a few days ago\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2683\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2683\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2682","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2682\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2682\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2682\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2682","id":948713137,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNTE2NjU2","number":2682,"title":"Fix c4 expected files","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T14:29:31Z","updated_at":"2021-07-20T14:38:11Z","closed_at":"2021-07-20T14:38:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2682","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2682","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2682.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2682.patch","merged_at":"2021-07-20T14:38:10Z"},"body":"Some files were not registered in the list of expected files to download\r\n\r\nFix 
https:\/\/github.com\/huggingface\/datasets\/issues\/2677","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2682\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2682\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2681","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2681\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2681\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2681\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2681","id":948708645,"node_id":"MDU6SXNzdWU5NDg3MDg2NDU=","number":2681,"title":"5 duplicate datasets","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-20T14:25:00Z","updated_at":"2021-07-20T15:44:17Z","closed_at":"2021-07-20T15:44:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nIn 5 cases, I could find a dataset on Paperswithcode which references two Hugging Face datasets as dataset loaders. 
They are:\r\n\r\n- https:\/\/paperswithcode.com\/dataset\/multinli -> https:\/\/huggingface.co\/datasets\/multi_nli and https:\/\/huggingface.co\/datasets\/multi_nli_mismatch\r\n \r\n \"Capture\r\n\r\n- https:\/\/paperswithcode.com\/dataset\/squad -> https:\/\/huggingface.co\/datasets\/squad and https:\/\/huggingface.co\/datasets\/squad_v2\r\n- https:\/\/paperswithcode.com\/dataset\/narrativeqa -> https:\/\/huggingface.co\/datasets\/narrativeqa and https:\/\/huggingface.co\/datasets\/narrativeqa_manual\r\n- https:\/\/paperswithcode.com\/dataset\/hate-speech-and-offensive-language -> https:\/\/huggingface.co\/datasets\/hate_offensive and https:\/\/huggingface.co\/datasets\/hate_speech_offensive\r\n- https:\/\/paperswithcode.com\/dataset\/newsph-nli -> https:\/\/huggingface.co\/datasets\/newsph and https:\/\/huggingface.co\/datasets\/newsph_nli\r\n\r\nPossible solutions:\r\n- don't fix (it works)\r\n- for each pair of duplicate datasets, remove one, and create an alias to the other.\r\n\r\n## Steps to reproduce the bug\r\n\r\nVisit the Paperswithcode links, and look at the \"Dataset Loaders\" section\r\n\r\n## Expected results\r\n\r\nThere should only be one reference to a Hugging Face dataset loader\r\n\r\n## Actual results\r\n\r\nTwo Hugging Face dataset loaders\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2681\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2681\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2680","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2680\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2680\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2680\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2680","id":948649716,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNDYyNzY3","number":2680,"title":"feat: \ud83c\udfb8 add paperswithcode id for qasper 
dataset","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T13:22:29Z","updated_at":"2021-07-20T14:04:10Z","closed_at":"2021-07-20T14:04:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2680","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2680","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2680.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2680.patch","merged_at":"2021-07-20T14:04:10Z"},"body":"The reverse reference exists on paperswithcode:\r\nhttps:\/\/paperswithcode.com\/dataset\/qasper","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2680\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2680\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2679","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2679\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2679\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2679\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2679","id":948506638,"node_id":"MDU6SXNzdWU5NDg1MDY2Mzg=","number":2679,"title":"Cannot load the blog_authorship_corpus due to codec 
errors","user":{"login":"izaskr","id":38069449,"node_id":"MDQ6VXNlcjM4MDY5NDQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38069449?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/izaskr","html_url":"https:\/\/github.com\/izaskr","followers_url":"https:\/\/api.github.com\/users\/izaskr\/followers","following_url":"https:\/\/api.github.com\/users\/izaskr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/izaskr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/izaskr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/izaskr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/izaskr\/orgs","repos_url":"https:\/\/api.github.com\/users\/izaskr\/repos","events_url":"https:\/\/api.github.com\/users\/izaskr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/izaskr\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-07-20T10:13:20Z","updated_at":"2021-07-21T17:02:21Z","closed_at":"2021-07-21T13:11:58Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull
_request":null,"body":"## Describe the bug\r\nA codec error is raised while loading the blog_authorship_corpus. \r\n\r\n## Steps to reproduce the bug\r\n```\r\nfrom datasets import load_dataset\r\nraw_datasets = load_dataset(\"blog_authorship_corpus\")\r\n```\r\n\r\n\r\n## Expected results\r\nLoading the dataset without errors.\r\n\r\n## Actual results\r\nAn error similar to the one below was raised for (what seems like) every XML file.\r\n\/home\/izaskr\/.cache\/huggingface\/datasets\/downloads\/extracted\/7cf52524f6517e168604b41c6719292e8f97abbe8f731e638b13423f4212359a\/blogs\/788358.male.24.Arts.Libra.xml cannot be loaded. Error message: 'utf-8' codec can't decode byte 0xe7 in position 7551: invalid continuation byte\r\n\r\nTraceback (most recent call last): \r\n File \"\", line 1, in \r\n File \"\/home\/izaskr\/anaconda3\/envs\/local_vae_older\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 856, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/izaskr\/anaconda3\/envs\/local_vae_older\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 583, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/izaskr\/anaconda3\/envs\/local_vae_older\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 671, in _download_and_prepare\r\n verify_splits(self.info.splits, split_dict)\r\n File \"\/home\/izaskr\/anaconda3\/envs\/local_vae_older\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 74, in verify_splits\r\n raise NonMatchingSplitsSizesError(str(bad_splits))\r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=610252351, num_examples=532812, dataset_name='blog_authorship_corpus'), 'recorded': SplitInfo(name='train', num_bytes=614706451, num_examples=535568, dataset_name='blog_authorship_corpus')}, {'expected': SplitInfo(name='validation', num_bytes=37500394, num_examples=31277, dataset_name='blog_authorship_corpus'), 'recorded': SplitInfo(name='validation', num_bytes=32553710, num_examples=28521, dataset_name='blog_authorship_corpus')}]\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-4.15.0-132-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.8\r\n- PyArrow version: 4.0.1\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2679\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2679\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2678","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2678\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2678\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2678\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2678","id":948471222,"node_id":"MDU6SXNzdWU5NDg0NzEyMjI=","number":2678,"title":"Import Error in Kaggle 
notebook","user":{"login":"prikmm","id":47216475,"node_id":"MDQ6VXNlcjQ3MjE2NDc1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47216475?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/prikmm","html_url":"https:\/\/github.com\/prikmm","followers_url":"https:\/\/api.github.com\/users\/prikmm\/followers","following_url":"https:\/\/api.github.com\/users\/prikmm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/prikmm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/prikmm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/prikmm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/prikmm\/orgs","repos_url":"https:\/\/api.github.com\/users\/prikmm\/repos","events_url":"https:\/\/api.github.com\/users\/prikmm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/prikmm\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-07-20T09:28:38Z","updated_at":"2021-07-21T13:59:26Z","closed_at":"2021-07-21T13:03:02Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nNot able to import datasets library in kaggle notebooks\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n!pip install datasets\r\nimport datasets\r\n```\r\n\r\n## Expected results\r\nNo such error\r\n\r\n## Actual results\r\n```\r\nImportError Traceback (most recent call last)\r\n in \r\n----> 1 import datasets\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/__init__.py in \r\n 31 )\r\n 32 \r\n---> 33 from .arrow_dataset import Dataset, concatenate_datasets\r\n 34 from .arrow_reader import ArrowReader, ReadInstruction\r\n 35 from .arrow_writer import ArrowWriter\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in \r\n 36 import pandas as pd\r\n 37 import pyarrow as pa\r\n---> 38 import pyarrow.compute as pc\r\n 39 from multiprocess import Pool, RLock\r\n 40 from tqdm.auto import tqdm\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/pyarrow\/compute.py in \r\n 16 # under the License.\r\n 17 \r\n---> 18 from pyarrow._compute import ( # noqa\r\n 19 Function,\r\n 20 FunctionOptions,\r\n\r\nImportError: \/opt\/conda\/lib\/python3.7\/site-packages\/pyarrow\/_compute.cpython-37m-x86_64-linux-gnu.so: undefined symbol: _ZNK5arrow7compute15KernelSignature8ToStringEv\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Kaggle\r\n- Python version: 3.7.10\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2678\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2678\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2677","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2677\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2677\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2677\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2677","id":948429788,"node_id":"MDU6SXNzdWU5NDg0Mjk3ODg=","number":2677,"title":"Error when downloading C4","user":{"login":"Aktsvigun","id":36672861,"node_id":"MDQ6VXNlcjM2NjcyODYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36672861?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Aktsvigun","html_url":"https:\/\/github.com\/Aktsvigun","followers_url":"https:\/\/api.github.com\/users\/Aktsvigun\/followers","following_url":"https:\/\/api.github.com\/users\/Aktsvigun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Aktsvigun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Aktsvigun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Aktsvigun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Aktsvigun\/orgs","repos_url":"https:\/\/api.github.com\/users\/Aktsvigun\/repos","events_url":"https:\/\/api.github.com\/users\/Aktsvigun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Aktsvigun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-07-20T08:37:30Z","updated_at":"2021-07-20T14:41:31Z","closed_at":"2021-07-20T14:38:10Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI am trying to download `en` corpus from C4 dataset. However, I get an error caused by validation files download (see image). My code is very primitive:\r\n`datasets.load_dataset('c4', 'en')`\r\n\r\nIs this a bug or do I have some configurations missing on my server? 
\r\nThanks!\r\n\r\n\r\n\"\u0421\u043d\u0438\u043c\u043e\u043a","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2677\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2677\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2676","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2676\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2676\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2676\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2676","id":947734909,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyNjc2NTg5","number":2676,"title":"Increase json reader block_size automatically","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-19T14:51:14Z","updated_at":"2021-07-19T17:51:39Z","closed_at":"2021-07-19T17:51:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2676","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2676","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2676.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2676.patch","merged_at":"2021-07-19T17:51:38Z"},"body":"Currently some files can't be read with the default parameters of the JSON lines reader.\r\nFor example this one:\r\nhttps:\/\/huggingface.co\/datasets\/thomwolf\/codeparrot\/resolve\/main\/file-000000000006.json.gz\r\n\r\nraises a pyarrow error:\r\n```python\r\nArrowInvalid: straddling object straddles two block boundaries (try to increase block size?)\r\n```\r\n\r\nThe block size that is used is the default one by pyarrow (related to this [jira issue](https:\/\/issues.apache.org\/jira\/browse\/ARROW-9612)).\r\n\r\nTo fix this issue I changed the block_size to increase automatically if there is a straddling issue when parsing a batch of json lines.\r\n\r\nBy default the value is `chunksize \/\/ 32` in order to leverage multithreading, and it doubles every time a straddling issue occurs. 
The block_size is then reset for each file.\r\n\r\ncc @thomwolf @albertvillanova ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2676\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2676\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2675","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2675\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2675\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2675\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2675","id":947657732,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyNjEwNTA1","number":2675,"title":"Parallelize ETag requests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-19T13:30:42Z","updated_at":"2021-07-19T19:33:25Z","closed_at":"2021-07-19T19:33:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2675","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2675","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2675.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2675.patch","merged_at":"2021-07-19T19:33:25Z"},"body":"Since https:\/\/github.com\/huggingface\/datasets\/pull\/2628 we use the ETag of the remote data files to compute the directory in the cache where a dataset is saved. This is useful in order to reload the dataset from the cache only if the remote files haven't changed.\r\n\r\nIn this PR I made the ETag requests parallel using multithreading. 
There is also a tqdm progress bar that shows up if there are more than 16 data files.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2675\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2675\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2674","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2674\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2674\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2674\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2674","id":947338202,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyMzMzODU3","number":2674,"title":"Fix sacrebleu parameter name","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-19T07:07:26Z","updated_at":"2021-07-19T08:07:03Z","closed_at":"2021-07-19T08:07:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2674","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2674","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2674.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2674.patch","merged_at":"2021-07-19T08:07:03Z"},"body":"DONE:\r\n- Fix parameter name: `smooth` to `smooth_method`.\r\n- Improve kwargs description.\r\n- Align docs on using a metric.\r\n- Add example of passing additional arguments in using metrics.\r\n\r\nRelated to #2669.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2674\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2674\/timeline","performed_via_github_app":null} 
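A short usage sketch of the parameter-name fix described in the sacrebleu PR above: keyword arguments such as `smooth_method` are passed through `Metric.compute` to the underlying sacrebleu call. The example strings are illustrative, not taken from the PR.

```python
# Minimal sketch of passing metric-specific kwargs at compute time, assuming the
# corrected parameter name from the PR above (`smooth_method`, not `smooth`).
from datasets import load_metric

sacrebleu = load_metric("sacrebleu")
predictions = ["the cat sat on the mat"]    # illustrative hypotheses
references = [["the cat sat on the mat"]]   # one list of reference strings per prediction
result = sacrebleu.compute(
    predictions=predictions,
    references=references,
    smooth_method="exp",  # forwarded to the underlying sacrebleu implementation
)
print(result["score"])
```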
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2673","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2673\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2673\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2673\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2673","id":947300008,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyMzAxMTgw","number":2673,"title":"Fix potential DuplicatedKeysError in SQuAD","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-19T06:08:00Z","updated_at":"2021-07-19T07:08:03Z","closed_at":"2021-07-19T07:08:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2673","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2673","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2673.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2673.patch","merged_at":"2021-07-19T07:08:03Z"},"body":"DONE:\r\n- Fix potential DiplicatedKeysError by ensuring keys are unique.\r\n- Align examples in the docs with SQuAD code.\r\n\r\nWe should promote as a good practice, that the keys should be programmatically generated as unique, instead of read from data (which might be not unique).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2673\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2673\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2672","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2672\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2672\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2672\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2672","id":947294605,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyMjk2NDQ4","number":2672,"title":"Fix potential 
DuplicatedKeysError in LibriSpeech","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-19T06:00:49Z","updated_at":"2021-07-19T06:28:57Z","closed_at":"2021-07-19T06:28:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2672","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2672","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2672.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2672.patch","merged_at":"2021-07-19T06:28:56Z"},"body":"DONE:\r\n- Fix unnecessary path join.\r\n- Fix potential DiplicatedKeysError by ensuring keys are unique.\r\n\r\nWe should promote as a good practice, that the keys should be programmatically generated as unique, instead of read from data (which might be not unique).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2672\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2672\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2671","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2671\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2671\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2671\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2671","id":947273875,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyMjc5MTM0","number":2671,"title":"Mesinesp development and training data sets have been 
added.","user":{"login":"aslihanuysall","id":32900185,"node_id":"MDQ6VXNlcjMyOTAwMTg1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32900185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aslihanuysall","html_url":"https:\/\/github.com\/aslihanuysall","followers_url":"https:\/\/api.github.com\/users\/aslihanuysall\/followers","following_url":"https:\/\/api.github.com\/users\/aslihanuysall\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aslihanuysall\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aslihanuysall\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aslihanuysall\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aslihanuysall\/orgs","repos_url":"https:\/\/api.github.com\/users\/aslihanuysall\/repos","events_url":"https:\/\/api.github.com\/users\/aslihanuysall\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aslihanuysall\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-19T05:14:38Z","updated_at":"2021-07-19T07:32:28Z","closed_at":"2021-07-19T06:45:50Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2671","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2671","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2671.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2671.patch","merged_at":null},"body":"https:\/\/zenodo.org\/search?page=1&size=20&q=mesinesp, Mesinesp has Medical Semantic Indexed records in Spanish. Indexing is done using DeCS codes, a sort of Spanish equivalent to MeSH terms.\r\nThe Mesinesp (Spanish BioASQ track, see https:\/\/temu.bsc.es\/mesinesp) development set has a total of 750 records.\r\nThe Mesinesp (Spanish BioASQ track, see https:\/\/temu.bsc.es\/mesinesp) training set has a total of 369,368 records. 
\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2671\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2671\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2670","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2670\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2670\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2670\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2670","id":947120709,"node_id":"MDU6SXNzdWU5NDcxMjA3MDk=","number":2670,"title":"Using sharding to parallelize indexing","user":{"login":"ggdupont","id":5583410,"node_id":"MDQ6VXNlcjU1ODM0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5583410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ggdupont","html_url":"https:\/\/github.com\/ggdupont","followers_url":"https:\/\/api.github.com\/users\/ggdupont\/followers","following_url":"https:\/\/api.github.com\/users\/ggdupont\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ggdupont\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ggdupont\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ggdupont\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ggdupont\/orgs","repos_url":"https:\/\/api.github.com\/users\/ggdupont\/repos","events_url":"https:\/\/api.github.com\/users\/ggdupont\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ggdupont\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-18T21:26:26Z","updated_at":"2021-10-07T13:33:25Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nCreating an elasticsearch index on large dataset could be quite long and cannot be parallelized on shard (the index creation is colliding)\r\n\r\n**Describe the solution you'd like**\r\nWhen working on dataset shards, if an index already exists, its mapping should be checked and if compatible, the indexing process should continue with the shard data. \r\n\r\nAdditionally, at the end of the process, the `_indexes` dict should be send back to the original dataset object (from which the shards have been created) to allow to use the index for later filtering on the whole dataset.\r\n\r\n**Describe alternatives you've considered**\r\nEach dataset shard could created independent partial indices. then on the whole dataset level, indices should be all referred in `_indexes` dict and be used in querying through `get_nearest_examples()`. 
The drawback is that the scores will be computed independently on the partial indices leading to inconsistent values for most scoring based on corpus level statistics (tf\/idf, BM25).\r\n\r\n**Additional context**\r\nThe objectives is to parallelize the index creation to speed-up the process (ie surcharging the ES server which is fine to handle large load) while later enabling search on the whole dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2670\/reactions","total_count":3,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2670\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2669","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2669\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2669\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2669\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2669","id":946982998,"node_id":"MDU6SXNzdWU5NDY5ODI5OTg=","number":2669,"title":"Metric kwargs are not passed to underlying external metric f1_score","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-07-18T08:32:31Z","updated_at":"2021-07-18T18:36:05Z","closed_at":"2021-07-18T11:19:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen I want to use F1 score with average=\"min\", this keyword argument does not seem to be passed through to the underlying sklearn metric. 
This is evident because [sklearn](https:\/\/scikit-learn.org\/stable\/modules\/generated\/sklearn.metrics.f1_score.html) throws an error telling me so.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\nf1 = datasets.load_metric(\"f1\", keep_in_memory=True, average=\"min\")\r\nf1.add_batch(predictions=[0,2,3], references=[1, 2, 3])\r\nf1.compute()\r\n```\r\n\r\n## Expected results\r\nNo error, because `average=\"min\"` should be passed correctly to f1_score in sklearn.\r\n\r\n## Actual results\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\datasets\\metric.py\", line 402, in compute\r\n output = self._compute(predictions=predictions, references=references, **kwargs)\r\n File \"C:\\Users\\bramv\\.cache\\huggingface\\modules\\datasets_modules\\metrics\\f1\\82177930a325d4c28342bba0f116d73f6d92fb0c44cd67be32a07c1262b61cfe\\f1.py\", line 97, in _compute\r\n \"f1\": f1_score(\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\utils\\validation.py\", line 63, in inner_f\r\n return f(*args, **kwargs)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\metrics\\_classification.py\", line 1071, in f1_score\r\n return fbeta_score(y_true, y_pred, beta=1, labels=labels,\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\utils\\validation.py\", line 63, in inner_f\r\n return f(*args, **kwargs)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\metrics\\_classification.py\", line 1195, in fbeta_score\r\n _, _, f, _ = precision_recall_fscore_support(y_true, y_pred,\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\utils\\validation.py\", line 63, in inner_f\r\n return f(*args, **kwargs)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\metrics\\_classification.py\", line 1464, in precision_recall_fscore_support\r\n labels = _check_set_wise_labels(y_true, y_pred, average, labels,\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\metrics\\_classification.py\", line 1294, in _check_set_wise_labels\r\n raise ValueError(\"Target is %s but average='binary'. Please \"\r\nValueError: Target is multiclass but average='binary'. 
Please choose another average setting, one of [None, 'micro', 'macro', 'weighted'].\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.9.2\r\n- PyArrow version: 4.0.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2669\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2669\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2668","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2668\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2668\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2668\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2668","id":946867622,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxOTY1MTY1","number":2668,"title":"Add Russian SuperGLUE","user":{"login":"slowwavesleep","id":44175589,"node_id":"MDQ6VXNlcjQ0MTc1NTg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44175589?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slowwavesleep","html_url":"https:\/\/github.com\/slowwavesleep","followers_url":"https:\/\/api.github.com\/users\/slowwavesleep\/followers","following_url":"https:\/\/api.github.com\/users\/slowwavesleep\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slowwavesleep\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slowwavesleep\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slowwavesleep\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slowwavesleep\/orgs","repos_url":"https:\/\/api.github.com\/users\/slowwavesleep\/repos","events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-17T17:41:28Z","updated_at":"2021-07-29T11:50:31Z","closed_at":"2021-07-29T11:50:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2668","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2668","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2668.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2668.patch","merged_at":"2021-07-29T11:50:30Z"},"body":"Hi,\r\n\r\nThis adds the [Russian SuperGLUE](https:\/\/russiansuperglue.com\/) dataset. 
For the most part I reused the code for the original SuperGLUE, although there are some relatively minor differences in the structure that I accounted for.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2668\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2668\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2667","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2667\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2667\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2667\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2667","id":946861908,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxOTYwNzc3","number":2667,"title":"Use tqdm from tqdm_utils","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-17T17:06:35Z","updated_at":"2021-07-19T17:39:10Z","closed_at":"2021-07-19T17:32:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2667","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2667","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2667.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2667.patch","merged_at":"2021-07-19T17:32:00Z"},"body":"This PR replaces `tqdm` from the `tqdm` lib with `tqdm` from `datasets.utils.tqdm_utils`. With this change, it's possible to disable progress bars just by calling `disable_progress_bar`. Note this doesn't work on Windows when using multiprocessing due to how global variables are shared between processes. Currently, there is no easy way to disable progress bars in a multiprocess setting on Windows (patching logging with `datasets.utils.logging.get_verbosity = lambda: datasets.utils.logging.NOTSET` doesn't seem to work as well), so adding support for this is a future goal. 
Additionally, this PR adds a unit (\"ba\" for batches) to the bar printed by `Dataset.to_json` (this change is motivated by https:\/\/github.com\/huggingface\/datasets\/issues\/2657).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2667\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2667\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2666","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2666\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2666\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2666\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2666","id":946825140,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxOTMzMDM1","number":2666,"title":"Adds CodeClippy dataset [WIP]","user":{"login":"arampacha","id":69807323,"node_id":"MDQ6VXNlcjY5ODA3MzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69807323?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arampacha","html_url":"https:\/\/github.com\/arampacha","followers_url":"https:\/\/api.github.com\/users\/arampacha\/followers","following_url":"https:\/\/api.github.com\/users\/arampacha\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arampacha\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arampacha\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arampacha\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arampacha\/orgs","repos_url":"https:\/\/api.github.com\/users\/arampacha\/repos","events_url":"https:\/\/api.github.com\/users\/arampacha\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arampacha\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-17T13:32:04Z","updated_at":"2021-07-19T09:09:54Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2666","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2666","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2666.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2666.patch","merged_at":null},"body":"CodeClippy is an open-source code dataset scraped from GitHub during the flax-jax-community-week\r\nhttps:\/\/the-eye.eu\/public\/AI\/training_data\/code_clippy_data\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2666\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2666\/timeline","performed_via_github_app":null} 
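For the unit change to `Dataset.to_json` described in PR 2667 above, the relevant tqdm behaviour is roughly the following. This is a standalone sketch using plain tqdm, not the PR's actual code, and the batch size of 1000 is an assumption for illustration only.

```python
# Standalone tqdm sketch: `unit` labels each tick ("ba" = one batch of examples)
# and a numeric `unit_scale` multiplies the displayed count, so the bar reports
# examples rather than opaque iteration counts.
from tqdm.auto import tqdm

batch_size = 1_000      # assumed batch size, for illustration only
num_examples = 70_000

for _ in tqdm(range(num_examples // batch_size), unit="ba", unit_scale=batch_size):
    pass  # one JSON batch would be written per iteration
```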
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2665","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2665\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2665\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2665\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2665","id":946822036,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxOTMwNjky","number":2665,"title":"Adds APPS dataset to the hub [WIP]","user":{"login":"arampacha","id":69807323,"node_id":"MDQ6VXNlcjY5ODA3MzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69807323?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arampacha","html_url":"https:\/\/github.com\/arampacha","followers_url":"https:\/\/api.github.com\/users\/arampacha\/followers","following_url":"https:\/\/api.github.com\/users\/arampacha\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arampacha\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arampacha\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arampacha\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arampacha\/orgs","repos_url":"https:\/\/api.github.com\/users\/arampacha\/repos","events_url":"https:\/\/api.github.com\/users\/arampacha\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arampacha\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-17T13:13:17Z","updated_at":"2021-07-17T17:56:47Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2665","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2665","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2665.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2665.patch","merged_at":null},"body":"A loading script for [APPS dataset](https:\/\/github.com\/hendrycks\/apps) ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2665\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2665\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2663","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2663\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2663\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2663\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2663","id":946552273,"node_id":"MDU6SXNzdWU5NDY1NTIyNzM=","number":2663,"title":"[`to_json`] add multi-proc sharding 
support","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-16T19:41:50Z","updated_at":"2021-09-13T13:56:37Z","closed_at":"2021-09-13T13:56:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As discussed on slack it appears that `to_json` is quite slow on huge datasets like OSCAR.\r\n\r\nI implemented sharded saving, which is much much faster - but the tqdm bars all overwrite each other, so it's hard to make sense of the progress, so if possible ideally this multi-proc support could be implemented internally in `to_json` via `num_proc` argument. 
I guess `num_proc` will be the number of shards?\r\n\r\nI think the user will need to use this feature wisely, since too many processes writing to say normal style HD is likely to be slower than one process.\r\n\r\nI'm not sure whether the user should be responsible to concatenate the shards at the end or `datasets`, either way works for my needs.\r\n\r\nThe code I was using:\r\n\r\n```\r\nfrom multiprocessing import cpu_count, Process, Queue\r\n\r\n[...]\r\n\r\nfiltered_dataset = concat_dataset.map(filter_short_documents, batched=True, batch_size=256, num_proc=cpu_count())\r\n\r\nDATASET_NAME = \"oscar\"\r\nSHARDS = 10\r\ndef process_shard(idx):\r\n print(f\"Sharding {idx}\")\r\n ds_shard = filtered_dataset.shard(SHARDS, idx, contiguous=True)\r\n # ds_shard = ds_shard.shuffle() # remove contiguous=True above if shuffling\r\n print(f\"Saving {DATASET_NAME}-{idx}.jsonl\")\r\n ds_shard.to_json(f\"{DATASET_NAME}-{idx}.jsonl\", orient=\"records\", lines=True, force_ascii=False)\r\n\r\nqueue = Queue()\r\nprocesses = [Process(target=process_shard, args=(idx,)) for idx in range(SHARDS)]\r\nfor p in processes:\r\n p.start()\r\n\r\nfor p in processes:\r\n p.join()\r\n```\r\n\r\nThank you!\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2663\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2663\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2662","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2662\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2662\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2662\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2662","id":946470815,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxNjM5MjU5","number":2662,"title":"Load Dataset from the Hub (NO DATASET 
SCRIPT)","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-07-16T17:21:58Z","updated_at":"2021-08-25T14:53:01Z","closed_at":"2021-08-25T14:18:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2662","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2662","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2662.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2662.patch","merged_at":"2021-08-25T14:18:08Z"},"body":"## Load the data from any Dataset repository on the Hub\r\n\r\nThis PR adds support for loading datasets from any dataset repository on the hub, without requiring any dataset script.\r\n\r\nAs a user it's now possible to create a repo and upload some csv\/json\/text\/parquet files, and then be able to load the data in one line. Here is an example with the `allenai\/c4` repository that contains a lot of compressed json lines files:\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndata_files = {\"train\": \"en\/c4-train.*.json.gz\"}\r\nc4 = load_dataset(\"allenai\/c4\", data_files=data_files, split=\"train\", streaming=True)\r\n\r\nprint(c4.n_shards)\r\n# 1024\r\nprint(next(iter(c4)))\r\n# {'text': 'Beginners BBQ Class Takin...'}\r\n```\r\n\r\nBy default it loads all the files, but as shown in the example you can choose the ones you want with unix style patterns.\r\n\r\nOf course it's still possible to use dataset scripts since they offer the most flexibility.\r\n\r\n## Implementation details\r\n\r\nIt uses `huggingface_hub` to list the files in a dataset repository.\r\n\r\nIf you provide a path to a local directory instead of a repository name, it works the same way but it uses `glob`.\r\n\r\nDepending on the data files available, or passed in the `data_files` parameter, one of the available builders will be used among the csv, json, text and parquet builders.\r\n\r\nBecause of this, it's not possible to load both csv and json files at once. 
In this case you have to load them separately and then concatenate the two datasets for example.\r\n\r\n## TODO\r\n\r\n- [x] tests\r\n- [x] docs\r\n- [x] when huggingface_hub gets a new release, update the CI and the setup.py\r\n\r\nClose https:\/\/github.com\/huggingface\/datasets\/issues\/2629","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2662\/reactions","total_count":5,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":5,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2662\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2661","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2661\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2661\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2661\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2661","id":946446967,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxNjE5MzAz","number":2661,"title":"Add SD task for SUPERB","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2021-07-16T16:43:21Z","updated_at":"2021-08-04T17:03:53Z","closed_at":"2021-08-04T17:03:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2661","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2661","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2661.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2661.patch","merged_at":"2021-08-04T17:03:52Z"},"body":"Include the SD (Speaker Diarization) task as described in the [SUPERB paper](https:\/\/arxiv.org\/abs\/2105.01051) and `s3prl` [instructions](https:\/\/github.com\/s3prl\/s3prl\/tree\/master\/s3prl\/downstream#sd-speaker-diarization).\r\n\r\nTODO:\r\n- [x] Generate the LibriMix corpus\r\n- [x] Prepare the corpus for diarization\r\n- [x] Upload these files to the superb-data repo\r\n- [x] Transcribe the corresponding s3prl processing of these files into our superb loading script\r\n- [x] README: tags + description sections\r\n- ~~Add DER metric~~ (we leave the DER metric for a follow-up PR)\r\n\r\nRelated to 
#2619.\r\n\r\nClose #2653.\r\n\r\ncc: @lewtun ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2661\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2661\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2660","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2660\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2660\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2660\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2660","id":946316180,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxNTA4NzE0","number":2660,"title":"Move checks from _map_single to map","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-16T13:53:33Z","updated_at":"2021-09-06T14:12:23Z","closed_at":"2021-09-06T14:12:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2660","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2660","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2660.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2660.patch","merged_at":"2021-09-06T14:12:23Z"},"body":"The goal of this PR is to remove duplicated checks in the `map` logic to execute them only once whenever possible (`fn_kwargs`, `input_columns`, ...). Additionally, this PR improves the consistency (to align it with `input_columns`) of the `remove_columns` check by adding support for a single string value, which is then wrapped into a list. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2660\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2660\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2659","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2659\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2659\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2659\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2659","id":946155407,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxMzcwNzU3","number":2659,"title":"Allow dataset config kwargs to be None","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-16T10:25:38Z","updated_at":"2021-07-16T12:46:07Z","closed_at":"2021-07-16T12:46:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2659","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2659","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2659.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2659.patch","merged_at":"2021-07-16T12:46:06Z"},"body":"Close https:\/\/github.com\/huggingface\/datasets\/issues\/2658\r\n\r\nThe dataset config kwargs that were set to None we simply ignored.\r\nThis was an issue when None has some meaning for certain parameters of certain builders, like the `sep` parameter of the \"csv\" builder that allows to infer to separator.\r\n\r\ncc @SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2659\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2659\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2658","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2658\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2658\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2658\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2658","id":946139532,"node_id":"MDU6SXNzdWU5NDYxMzk1MzI=","number":2658,"title":"Can't pass `sep=None` to load_dataset(\"csv\", ...) to infer the separator via pandas.read_csv","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github
.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-07-16T10:05:44Z","updated_at":"2021-07-16T12:46:06Z","closed_at":"2021-07-16T12:46:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When doing `load_dataset(\"csv\", sep=None)`, the `sep` passed to `pd.read_csv` is still the default `sep=\",\"` instead, which makes it impossible to make the csv loader infer the separator.\r\n\r\nRelated to https:\/\/github.com\/huggingface\/datasets\/pull\/2656\r\n\r\ncc @SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2658\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2658\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2657","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2657\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2657\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2657\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2657","id":945822829,"node_id":"MDU6SXNzdWU5NDU4MjI4Mjk=","number":2657,"title":"`to_json` reporting enhancements","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-15T23:32:18Z","updated_at":"2021-07-15T23:33:53Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"While using `to_json` 2 things came to mind that would have made the experience easier on the user:\r\n\r\n1. Could we have a `desc` arg for the tqdm use and a fallback to just `to_json` so that it'd be clear to the user what's happening? Surely, one can just print the description before calling json, but I thought perhaps it'd help to have it self-identify like you did for other progress bars recently.\r\n\r\n2. 
It took me a while to make sense of the reported numbers:\r\n```\r\n 22%|\u2588\u2588\u258f | 1536\/7076 [12:30:57<44:09:42, 28.70s\/it]\r\n```\r\nSo iteration here happens to be 10K samples, and the total is 70M records. But the user does't know that, so the progress bar is perfect, but the numbers it reports are meaningless until one discovers that 1it=10K samples. And one still has to convert these in the head - so it's not quick. Not exactly sure what's the best way to approach this, perhaps it can be part of `desc`? or report M or K, so it'd be built-in if it were to print, e.g.:\r\n```\r\n 22%|\u2588\u2588\u258f | 15360K\/70760K [12:30:57<44:09:42, 28.70s\/it]\r\n```\r\nor \r\n```\r\n 22%|\u2588\u2588\u258f | 15.36M\/70.76M [12:30:57<44:09:42, 28.70s\/it]\r\n```\r\n(while of course remaining friendly to small datasets)\r\n\r\nI forget if tqdm lets you add a magnitude identifier to the running count.\r\n\r\nThank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2657\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2657\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2656","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2656\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2656\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2656\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2656","id":945421790,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkwNzUzNjA3","number":2656,"title":"Change `from_csv` default arguments","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-15T14:09:06Z","updated_at":"2021-07-16T10:23:26Z","closed_at":"2021-07-16T10:23:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2656","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2656","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2656.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2656.patch","merged_at":null},"body":"Passing `sep=None` to pandas's `read_csv` lets 
pandas guess the CSV file's separator\r\n\r\nThis PR allows users to use this pandas's feature by passing `sep=None` to `Dataset.from_csv`:\r\n\r\n```python\r\nDataset.from_csv(\r\n ...,\r\n sep=None\r\n)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2656\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2656\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2655","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2655\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2655\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2655\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2655","id":945382723,"node_id":"MDU6SXNzdWU5NDUzODI3MjM=","number":2655,"title":"Allow the selection of multiple columns at once","user":{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-15T13:30:45Z","updated_at":"2021-07-23T15:40:57Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nSimilar to pandas, it would be great if we could select multiple columns at once.\r\n\r\n\r\n**Describe the solution you'd like**\r\n```python\r\nmy_dataset = ... 
# Has columns ['idx', 'sentence', 'label']\r\nidx, label = my_dataset[['idx', 'label']]\r\n```\r\n\r\n**Describe alternatives you've considered**\r\nwe can do `[dataset[col] for col in ('idx', 'label')]`\r\n\r\n**Additional context**\r\nThis is of course very minor.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2655\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2655\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2654","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2654\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2654\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2654\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2654","id":945167231,"node_id":"MDU6SXNzdWU5NDUxNjcyMzE=","number":2654,"title":"Give a user feedback if the dataset he loads is streamable or not","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-07-15T09:07:27Z","updated_at":"2021-08-02T11:03:21Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nI would love to know if a `dataset` is with the current implementation streamable or not. \r\n\r\n**Describe the solution you'd like**\r\nWe could show a warning when a dataset is loaded with `load_dataset('...',streaming=True)` when its lot streamable, e.g. if it is an archive. 
\r\n\r\n**Describe alternatives you've considered**\r\nAdd a new metadata tag for \"streaming\"\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2654\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2654\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2653","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2653\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2653\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2653\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2653","id":945102321,"node_id":"MDU6SXNzdWU5NDUxMDIzMjE=","number":2653,"title":"Add SD task for SUPERB","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/7","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7\/labels","id":6931350,"node_id":"MDk6TWlsZXN0b25lNjkzMTM1MA==","number":7,"title":"1.11","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":2,"state":"closed","created_at":"2021-07-09T05:49:00Z","updated_at":"2021-09-02T05:34:03Z","due_on":"2021-07-30T07:00:00Z","closed_at":"2021-09-02T05:34:03Z"},"comments":2,"created_at":"2021-07-15T07:51:40Z","updated_at":"2021-08-04T17:03:52Z","closed_at":"2021-08-04T17:03:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Include the SD (Speaker Diarization) task as described in the [SUPERB paper](https:\/\/arxiv.org\/abs\/2105.01051) and `s3prl` [instructions](https:\/\/github.com\/s3prl\/s3prl\/tree\/master\/s3prl\/downstream#sd-speaker-diarization).\r\n\r\nSteps:\r\n- [x] Generate the LibriMix corpus\r\n- [x] Prepare the corpus for diarization\r\n- [x] Upload these files to the superb-data repo\r\n- [x] Transcribe the corresponding s3prl processing of these files into our superb loading script\r\n- [ ] README: tags + description sections\r\n\r\nRelated to #2619.\r\n\r\ncc: @lewtun \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2653\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2653\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2652","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2652\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2652\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2652\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2652","id":944865924,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkwMjg0MTI4","number":2652,"title":"Fix logging 
docstring","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-14T23:19:58Z","updated_at":"2021-07-18T11:41:06Z","closed_at":"2021-07-15T09:57:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2652","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2652","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2652.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2652.patch","merged_at":"2021-07-15T09:57:31Z"},"body":"Remove \"no tqdm bars\" from the docstring in the logging module to align it with the changes introduced in #2534.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2652\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2652\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2651","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2651\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2651\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2651\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2651","id":944796961,"node_id":"MDU6SXNzdWU5NDQ3OTY5NjE=","number":2651,"title":"Setting log level higher than warning does not suppress progress 
bar","user":{"login":"Isa-rentacs","id":1147443,"node_id":"MDQ6VXNlcjExNDc0NDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1147443?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Isa-rentacs","html_url":"https:\/\/github.com\/Isa-rentacs","followers_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/followers","following_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/orgs","repos_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/repos","events_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-07-14T21:06:51Z","updated_at":"2021-12-15T01:59:24Z","closed_at":"2021-07-15T03:41:35Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI would like to disable progress bars for `.map` method (and other methods like `.filter` and `load_dataset` as well).\r\nAccording to #1627 one can suppress it by setting log level higher than `warning`, however doing so doesn't suppress it with version 1.9.0.\r\n\r\nI also tried to set `DATASETS_VERBOSITY` environment variable to `error` or `critical` but it also didn't work.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\n\r\nfrom datasets.utils.logging import set_verbosity_error\r\n\r\nset_verbosity_error()\r\n\r\ndef dummy_map(batch):\r\n return batch\r\n\r\ncommon_voice_train = datasets.load_dataset(\"common_voice\", \"de\", split=\"train\")\r\ncommon_voice_test = datasets.load_dataset(\"common_voice\", \"de\", split=\"test\")\r\n\r\ncommon_voice_train.map(dummy_map)\r\n```\r\n\r\n## Expected results\r\n- The progress bar for `.map` call won't be shown\r\n\r\n## Actual results\r\n- The progress bar for `.map` is still shown \r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-5.4.0-1045-aws-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.5\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2651\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2651\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2650","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2650\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2650\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2650\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2650","id":944672565,"node_id":"MDU6SXNzdWU5NDQ2NzI1NjU=","number":2650,"title":"[load_dataset] shard and parallelize the process","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-14T18:04:58Z","updated_at":"2021-10-25T15:53:58Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"- Some huge datasets take forever to build the first time. (e.g. 
oscar\/en) as it's done in a single cpu core.\r\n- If the build crashes, everything done up to that point gets lost\r\n\r\nRequest: Shard the build over multiple arrow files, which would enable:\r\n- much faster build by parallelizing the build process\r\n- if the process crashed, the completed arrow files don't need to be re-built again\r\n\r\nThank you!\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2650\/reactions","total_count":9,"+1":5,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":2},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2650\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2649","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2649\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2649\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2649\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2649","id":944651229,"node_id":"MDU6SXNzdWU5NDQ2NTEyMjk=","number":2649,"title":"adding progress bar \/ ETA for `load_dataset`","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-14T17:34:39Z","updated_at":"2021-07-14T17:38:00Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Please consider:\r\n```\r\nDownloading and preparing dataset oscar\/unshuffled_deduplicated_en (download: 462.40 GiB, generated: 1.18 TiB, post-processed: Unknown size, total: 1.63 TiB) to cache\/oscar\/unshuffled_deduplicated_en\/1.0.0\/84838bd49d2295f62008383b05620571535451d84545037bb94d6f3501651df2...\r\nHF google storage unreachable. Downloading and preparing it from source\r\n```\r\nand no indication whatsoever of whether things work well or when it'll be done. 
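Editor's note on the sharding/parallel-build request in #2650 above: a sketch only, assuming a much later `datasets` release in which `load_dataset()` accepts a `num_proc` argument to download and prepare shards in parallel; it addresses the speed part of the request, not crash resumption.

```python
# Sketch under the assumption that load_dataset() accepts num_proc
# (a later addition to the library, not available in the 1.9.x era).
from datasets import load_dataset

ds = load_dataset(
    "oscar",
    "unshuffled_deduplicated_en",
    num_proc=8,  # hypothetical worker count; pick roughly one per available CPU core
)
```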
It's important to have an estimated completion time for when doing slurm jobs since some instances have a cap on run-time.\r\n\r\nI think for this particular job it sat for 30min in total silence and then after 30min it started generating:\r\n```\r\n897850 examples [07:24, 10286.71 examples\/s]\r\n```\r\nwhich is already great!\r\n\r\nRequest: \r\n1. ETA - knowing how many hours to allocate for a slurm job\r\n2. progress bar - helps to know things are working and aren't stuck and where we are at.\r\n\r\nThank you!\r\n\r\n@lhoestq \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2649\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2649\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2648","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2648\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2648\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2648\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2648","id":944484522,"node_id":"MDU6SXNzdWU5NDQ0ODQ1MjI=","number":2648,"title":"Add web_split dataset for Paraphase and Rephrase benchmark","user":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"assignees":[{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-14T14:24:36Z","updated_at":"2021-07-14T14:26:12Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe:\r\nFor getting simple sentences from complex sentence there are dataset and task like wiki_split that is available in hugging face datasets. This web_split is a very similar dataset. 
There some research paper which states that by combining these two datasets we if we train the model it will yield better results on both tests data.\r\n\r\nThis dataset is made from web NLG data.\r\n\r\nAll the dataset related details are provided in the below repository\r\n\r\nGithub link: https:\/\/github.com\/shashiongithub\/Split-and-Rephrase\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2648\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2648\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2647","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2647\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2647\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2647\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2647","id":944424941,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg5OTExMzky","number":2647,"title":"Fix anchor in README","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-14T13:22:44Z","updated_at":"2021-07-18T11:41:18Z","closed_at":"2021-07-15T06:50:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2647","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2647","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2647.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2647.patch","merged_at":"2021-07-15T06:50:47Z"},"body":"I forgot to push this fix in #2611, so I'm sending it now. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2647\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2647\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2646","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2646\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2646\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2646\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2646","id":944379954,"node_id":"MDU6SXNzdWU5NDQzNzk5NTQ=","number":2646,"title":"downloading of yahoo_answers_topics dataset failed","user":{"login":"vikrant7k","id":66781249,"node_id":"MDQ6VXNlcjY2NzgxMjQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/66781249?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vikrant7k","html_url":"https:\/\/github.com\/vikrant7k","followers_url":"https:\/\/api.github.com\/users\/vikrant7k\/followers","following_url":"https:\/\/api.github.com\/users\/vikrant7k\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vikrant7k\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vikrant7k\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vikrant7k\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vikrant7k\/orgs","repos_url":"https:\/\/api.github.com\/users\/vikrant7k\/repos","events_url":"https:\/\/api.github.com\/users\/vikrant7k\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vikrant7k\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-14T12:31:05Z","updated_at":"2021-07-15T09:15:16Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI get an error datasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files when I try to download the yahoo_answers_topics dataset\r\n\r\n## Steps to reproduce the bug\r\n self.dataset = load_dataset(\r\n 'yahoo_answers_topics', cache_dir=self.config['yahoo_cache_dir'], split='train[:90%]')\r\n# Sample code to reproduce the bug\r\n self.dataset = load_dataset(\r\n 'yahoo_answers_topics', cache_dir=self.config['yahoo_cache_dir'], split='train[:90%]')\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source 
files\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2646\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2646\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2645","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2645\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2645\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2645\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2645","id":944374284,"node_id":"MDU6SXNzdWU5NDQzNzQyODQ=","number":2645,"title":"load_dataset processing failed with OS error after downloading a dataset","user":{"login":"fake-warrior8","id":40395156,"node_id":"MDQ6VXNlcjQwMzk1MTU2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40395156?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/fake-warrior8","html_url":"https:\/\/github.com\/fake-warrior8","followers_url":"https:\/\/api.github.com\/users\/fake-warrior8\/followers","following_url":"https:\/\/api.github.com\/users\/fake-warrior8\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/fake-warrior8\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/fake-warrior8\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/fake-warrior8\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/fake-warrior8\/orgs","repos_url":"https:\/\/api.github.com\/users\/fake-warrior8\/repos","events_url":"https:\/\/api.github.com\/users\/fake-warrior8\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/fake-warrior8\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-14T12:23:53Z","updated_at":"2021-07-15T09:34:02Z","closed_at":"2021-07-15T09:34:02Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAfter downloading a dataset like opus100, there is a bug that \r\nOSError: Cannot find data file.\r\nOriginal error:\r\ndlopen: cannot load any more object with static TLS\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nthis_dataset = load_dataset('opus100', 'af-en')\r\n```\r\n\r\n## Expected results\r\nthere is no error when running load_dataset.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\nTraceback (most recent call last):\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prep\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 989, in _prepare_split\r\n example = self.info.features.encode_example(record)\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/features.py\", line 952, in encode_example\r\n 
example = cast_to_python_objects(example)\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/features.py\", line 219, in cast_to_python_ob\r\n return _cast_to_python_objects(obj)[0]\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/features.py\", line 165, in _cast_to_python_o\r\n import torch\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/torch\/__init__.py\", line 188, in \r\n _load_global_deps()\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/torch\/__init__.py\", line 141, in _load_global_deps\r\n ctypes.CDLL(lib_path, mode=ctypes.RTLD_GLOBAL)\r\n File \"\/home\/anaconda3\/lib\/python3.6\/ctypes\/__init__.py\", line 348, in __init__\r\n self._handle = _dlopen(self._name, mode)\r\nOSError: dlopen: cannot load any more object with static TLS\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"download_hub_opus100.py\", line 9, in \r\n this_dataset = load_dataset('opus100', language_pair)\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 748, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepa\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 658, in _download_and_prep\r\n + str(e)\r\nOSError: Cannot find data file.\r\nOriginal error:\r\ndlopen: cannot load any more object with static TLS\r\n\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-3.13.0-32-generic-x86_64-with-debian-jessie-sid\r\n- Python version: 3.6.6\r\n- PyArrow version: 3.0.0\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2645\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2645\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2644","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2644\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2644\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2644\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2644","id":944254748,"node_id":"MDU6SXNzdWU5NDQyNTQ3NDg=","number":2644,"title":"Batched `map` not allowed to return 0 
items","user":{"login":"pcuenca","id":1177582,"node_id":"MDQ6VXNlcjExNzc1ODI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1177582?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pcuenca","html_url":"https:\/\/github.com\/pcuenca","followers_url":"https:\/\/api.github.com\/users\/pcuenca\/followers","following_url":"https:\/\/api.github.com\/users\/pcuenca\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pcuenca\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pcuenca\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pcuenca\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pcuenca\/orgs","repos_url":"https:\/\/api.github.com\/users\/pcuenca\/repos","events_url":"https:\/\/api.github.com\/users\/pcuenca\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pcuenca\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-07-14T09:58:19Z","updated_at":"2021-07-26T14:55:15Z","closed_at":"2021-07-26T14:55:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI'm trying to use `map` to filter a large dataset by selecting rows that match an expensive condition (files referenced by one of the columns need to exist in the filesystem, so we have to `stat` them). According to [the documentation](https:\/\/huggingface.co\/docs\/datasets\/processing.html#augmenting-the-dataset), `a batch mapped function can take as input a batch of size N and return a batch of size M where M can be greater or less than N and can even be zero`.\r\n\r\nHowever, when the returned batch has a size of zero (neither item in the batch fulfilled the condition), we get an `index out of bounds` error. I think that `arrow_writer.py` is [trying to infer the returned types using the first element returned](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/arrow_writer.py#L100), but no elements were returned in this case.\r\n\r\nFor this error to happen, I'm returning a dictionary that contains empty lists for the keys I want to keep, see below. If I return an empty dictionary instead (no keys), then a different error eventually occurs.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndef select_rows(examples):\r\n # `key` is a column name that exists in the original dataset\r\n # The following line simulates no matches found, so we return an empty batch\r\n result = {'key': []}\r\n return result\r\n\r\nfiltered_dataset = dataset.map(\r\n select_rows,\r\n remove_columns = dataset.column_names,\r\n batched = True,\r\n num_proc = 1,\r\n desc = \"Selecting rows with images that exist\"\r\n)\r\n```\r\n\r\nThe code above immediately triggers the exception. 
If we use the following instead:\r\n\r\n```python\r\ndef select_rows(examples):\r\n # `key` is a column name that exists in the original dataset\r\n result = {'key': []} # or defaultdict or whatever\r\n \r\n # code to check for condition and append elements to result\r\n # some_items_found will be set to True if there were any matching elements in the batch\r\n \r\n return result if some_items_found else {}\r\n```\r\n\r\nThen it _seems_ to work, but it eventually fails with some sort of schema error. I believe it may happen when an empty batch is followed by a non-empty one, but haven't set up a test to verify it.\r\n\r\nIn my opinion, returning a dictionary with empty lists and valid column names should be accepted as a valid result with zero items.\r\n\r\n## Expected results\r\nThe dataset would be filtered and only the matching fields would be returned.\r\n\r\n## Actual results\r\nAn exception is encountered, as described. Using a workaround makes it fail further along the line.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.1.dev0\r\n- Platform: Linux-5.4.0-53-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.10\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2644\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2644\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2643","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2643\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2643\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2643\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2643","id":944220273,"node_id":"MDU6SXNzdWU5NDQyMjAyNzM=","number":2643,"title":"Enum used in map functions will raise a RecursionError with dill.","user":{"login":"jorgeecardona","id":100702,"node_id":"MDQ6VXNlcjEwMDcwMg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/100702?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jorgeecardona","html_url":"https:\/\/github.com\/jorgeecardona","followers_url":"https:\/\/api.github.com\/users\/jorgeecardona\/followers","following_url":"https:\/\/api.github.com\/users\/jorgeecardona\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jorgeecardona\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jorgeecardona\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jorgeecardona\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jorgeecardona\/orgs","repos_url":"https:\/\/api.github.com\/users\/jorgeecardona\/repos","events_url":"https:\/\/api.github.com\/users\/jorgeecardona\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jorgeecardona\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-07-14T09:16:08Z","updated_at":"2021-11-02T09:51:11Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nEnums used in functions pass to `map` will fail at pickling with a maximum recursion exception as described here: https:\/\/github.com\/uqfoundation\/dill\/issues\/250#issuecomment-852566284\r\n\r\nIn my particular case, I use an enum to define an argument with fixed options using the `TraininigArguments` dataclass as base class and the `HfArgumentParser`. In the same file I use a `ds.map` that tries to pickle the content of the module including the definition of the enum that runs into the dill bug described above.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nfrom enum import Enum\r\n\r\nclass A(Enum):\r\n a = 'a'\r\n\r\ndef main():\r\n a = A.a\r\n \r\n def f(x):\r\n return {} if a == a.a else x\r\n \r\n ds = load_dataset('cnn_dailymail', '3.0.0')['test']\r\n ds = ds.map(f, num_proc=15)\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n```\r\n\r\n## Expected results\r\nThe known problem with dill could be prevented as explained in the link above (workaround.) Since `HFArgumentParser` nicely uses the enum class for choices it makes sense to also deal with this bug under the hood.\r\n\r\n## Actual results\r\n\r\n```python\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/site-packages\/dill\/_dill.py\", line 1373, in save_type\r\n pickler.save_reduce(_create_type, (type(obj), obj.__name__,\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/pickle.py\", line 690, in save_reduce\r\n save(args)\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/pickle.py\", line 558, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/pickle.py\", line 899, in save_tuple\r\n save(element)\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/pickle.py\", line 534, in save\r\n self.framer.commit_frame()\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/pickle.py\", line 220, in commit_frame\r\n if f.tell() >= self._FRAME_SIZE_TARGET or force:\r\nRecursionError: maximum recursion depth exceeded while calling a Python object\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-5.9.0-4-amd64-x86_64-with-glibc2.10\r\n- Python version: 3.8.5\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2643\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2643\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2642","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2642\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2642\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2642\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2642","id":944175697,"node_id":"MDU6SXNzdWU5NDQxNzU2OTc=","number":2642,"title":"Support multi-worker with streaming dataset 
(IterableDataset).","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-14T08:22:58Z","updated_at":"2021-07-15T09:37:34Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nThe current `.map` does not support multi-process, CPU can become bottleneck if the pre-processing is complex (e.g. t5 span masking).\r\n\r\n**Describe the solution you'd like**\r\nIdeally `.map` should support multi-worker like tfds, with `AUTOTUNE`.\r\n\r\n**Describe alternatives you've considered**\r\nA simpler solution is to shard the dataset and process it in parallel with pytorch dataloader. 
The shard does not need to be of equal size.\r\n* https:\/\/pytorch.org\/docs\/stable\/data.html#torch.utils.data.IterableDataset\r\n\r\n**Additional context**\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2642\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2642\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2641","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2641\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2641\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2641\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2641","id":943838085,"node_id":"MDU6SXNzdWU5NDM4MzgwODU=","number":2641,"title":"load_dataset(\"financial_phrasebank\") NonMatchingChecksumError","user":{"login":"courtmckay","id":13956255,"node_id":"MDQ6VXNlcjEzOTU2MjU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13956255?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/courtmckay","html_url":"https:\/\/github.com\/courtmckay","followers_url":"https:\/\/api.github.com\/users\/courtmckay\/followers","following_url":"https:\/\/api.github.com\/users\/courtmckay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/courtmckay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/courtmckay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/courtmckay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/courtmckay\/orgs","repos_url":"https:\/\/api.github.com\/users\/courtmckay\/repos","events_url":"https:\/\/api.github.com\/users\/courtmckay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/courtmckay\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-13T21:21:49Z","updated_at":"2021-07-19T13:26:10Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAttempting to download the financial_phrasebank dataset results in a NonMatchingChecksumError\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"financial_phrasebank\", 'sentences_allagree')\r\n```\r\n\r\n## Expected results\r\nI expect to see the financial_phrasebank dataset downloaded successfully\r\n\r\n## Actual results\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/www.researchgate.net\/profile\/Pekka_Malo\/publication\/251231364_FinancialPhraseBank-v10\/data\/0c96051eee4fb1d56e000000\/FinancialPhraseBank-v10.zip']\r\n\r\n## Environment info\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-4.14.232-177.418.amzn2.x86_64-x86_64-with-debian-10.6\r\n- Python version: 3.7.10\r\n- PyArrow version: 
4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2641\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2641\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2640","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2640\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2640\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2640\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2640","id":943591055,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg5MjAxMDkw","number":2640,"title":"Fix docstrings","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-13T16:09:14Z","updated_at":"2021-07-15T06:51:01Z","closed_at":"2021-07-15T06:06:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2640","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2640","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2640.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2640.patch","merged_at":"2021-07-15T06:06:12Z"},"body":"Fix rendering of some docstrings.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2640\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2640\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2639","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2639\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2639\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2639\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2639","id":943527463,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg5MTQ3NDE5","number":2639,"title":"Refactor patching to specific 
submodule","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-13T15:08:45Z","updated_at":"2021-07-13T16:52:49Z","closed_at":"2021-07-13T16:52:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2639","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2639","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2639.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2639.patch","merged_at":"2021-07-13T16:52:48Z"},"body":"Minor reorganization of the code, so that additional patching functions (not related to streaming) might be created.\r\n\r\nIn relation with the initial approach followed in #2631.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2639\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2639\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2638","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2638\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2638\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2638\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2638","id":943484913,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg5MTA5NTg1","number":2638,"title":"Streaming for the Json 
loader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-13T14:37:06Z","updated_at":"2021-07-16T15:59:32Z","closed_at":"2021-07-16T15:59:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2638","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2638","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2638.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2638.patch","merged_at":"2021-07-16T15:59:31Z"},"body":"It was not using `open` in the builder. Therefore `pyarrow.json.read_json` was downloading the full file to start yielding rows.\r\n\r\nMoreover, it appeared that `pyarrow.json.read_json` was not really suited for streaming as it was downloading too much data and failing if `block_size` was not properly configured (related to #2573).\r\n\r\nSo I switched to using `open` which is extended to support reading from remote file progressively, and I removed the pyarrow json reader which was not practical.\r\nInstead, I'm using the classical `json.loads` from the standard library.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2638\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2638\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2637","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2637\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2637\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2637\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2637","id":943290736,"node_id":"MDU6SXNzdWU5NDMyOTA3MzY=","number":2637,"title":"Add the CIDEr 
metric?","user":{"login":"zuujhyt","id":75845952,"node_id":"MDQ6VXNlcjc1ODQ1OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75845952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zuujhyt","html_url":"https:\/\/github.com\/zuujhyt","followers_url":"https:\/\/api.github.com\/users\/zuujhyt\/followers","following_url":"https:\/\/api.github.com\/users\/zuujhyt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zuujhyt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zuujhyt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zuujhyt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zuujhyt\/orgs","repos_url":"https:\/\/api.github.com\/users\/zuujhyt\/repos","events_url":"https:\/\/api.github.com\/users\/zuujhyt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zuujhyt\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-13T12:22:51Z","updated_at":"2021-09-27T07:57:05Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI find the api in https:\/\/huggingface.co\/metrics quite useful.\r\nI am playing around with video\/image captioning task, where CIDEr is a popular metric.\r\nDo you plan to add this into the HF ```datasets``` library?\r\nThanks.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2637\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2637\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2636","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2636\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2636\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2636\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2636","id":943044514,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg4NzEyMTY4","number":2636,"title":"Streaming for the Pandas 
loader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-13T09:18:21Z","updated_at":"2021-07-13T14:37:24Z","closed_at":"2021-07-13T14:37:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2636","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2636","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2636.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2636.patch","merged_at":"2021-07-13T14:37:23Z"},"body":"It was not using open in the builder. Therefore pd.read_pickle could fail when streaming from a private repo for example.\r\n\r\nIndeed, when streaming, open is extended to support reading from remote files and handles authentication to the HF Hub","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2636\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2636\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2635","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2635\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2635\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2635\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2635","id":943030999,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg4Njk5OTM5","number":2635,"title":"Streaming for the CSV 
loader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-13T09:08:58Z","updated_at":"2021-07-13T15:19:38Z","closed_at":"2021-07-13T15:19:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2635","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2635","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2635.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2635.patch","merged_at":"2021-07-13T15:19:37Z"},"body":"It was not using `open` in the builder. Therefore `pd.read_csv` was downloading the full file to start yielding rows.\r\n\r\nIndeed, when streaming, `open` is extended to support reading from remote file progressively.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2635\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2635\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2634","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2634\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2634\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2634\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2634","id":942805621,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg4NDk2Mzc2","number":2634,"title":"Inject ASR template for lj_speech 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-13T06:04:54Z","updated_at":"2021-07-13T09:05:09Z","closed_at":"2021-07-13T09:05:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2634","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2634","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2634.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2634.patch","merged_at":"2021-07-13T09:05:09Z"},"body":"Related to: #2565, #2633.\r\n\r\ncc: @lewtun 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2634\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2634\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2633","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2633\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2633\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2633\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2633","id":942396414,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg4MTMwOTA5","number":2633,"title":"Update ASR tags","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-12T19:58:31Z","updated_at":"2021-07-13T05:45:26Z","closed_at":"2021-07-13T05:45:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2633","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2633","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2633.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2633.patch","merged_at":"2021-07-13T05:45:13Z"},"body":"This PR updates the ASR tags of the 5 datasets added in #2565 following the change of task categories in #2620 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2633\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2633\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2632","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2632\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2632\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2632\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2632","id":942293727,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg4MDQyMjcw","number":2632,"title":"add image-classification task 
template","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-12T17:41:03Z","updated_at":"2021-07-13T15:44:28Z","closed_at":"2021-07-13T15:28:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2632","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2632","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2632.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2632.patch","merged_at":"2021-07-13T15:28:15Z"},"body":"Snippet below is the tl;dr, but you can try it out directly here:\r\n\r\n[![Open In Collab](https:\/\/colab.research.google.com\/assets\/colab-badge.svg)](https:\/\/colab.research.google.com\/gist\/nateraw\/005c025d41f0e48ae3d4ee61c0f20b70\/image-classification-task-template-demo.ipynb)\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset('nateraw\/image-folder', data_files='PetImages\/')\r\n# DatasetDict({\r\n# train: Dataset({\r\n# features: ['file', 'labels'],\r\n# num_rows: 23410\r\n# })\r\n# })\r\n\r\nds = ds.prepare_for_task('image-classification')\r\n# DatasetDict({\r\n# train: Dataset({\r\n# features: ['image_file_path', 'labels'],\r\n# num_rows: 23410\r\n# })\r\n# })\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2632\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":3,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2632\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2631","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2631\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2631\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2631\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2631","id":942242271,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg3OTk3MzM2","number":2631,"title":"Delete extracted files when loading 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":13,"created_at":"2021-07-12T16:39:33Z","updated_at":"2021-07-19T09:08:19Z","closed_at":"2021-07-19T09:08:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2631","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2631","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2631.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2631.patch","merged_at":"2021-07-19T09:08:18Z"},"body":"Close #2481, close #2604, close #2591.\r\n\r\ncc: @stas00, @thomwolf, @BirgerMoell ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2631\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2631\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2630","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2630\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2630\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2630\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2630","id":942102956,"node_id":"MDU6SXNzdWU5NDIxMDI5NTY=","number":2630,"title":"Progress bars are not properly rendered in Jupyter 
notebook","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-12T14:07:13Z","updated_at":"2022-02-03T15:55:33Z","closed_at":"2022-02-03T15:55:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe progress bars are not Jupyter widgets; regular progress bars appear (like in a terminal).\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nds.map(tokenize, num_proc=10)\r\n```\r\n\r\n## Expected results\r\nJupyter widgets displaying the progress bars.\r\n\r\n## Actual results\r\nSimple plane progress bars.\r\n\r\ncc: Reported by @thomwolf ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2630\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2630\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2629","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2629\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2629\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2629\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2629","id":941819205,"node_id":"MDU6SXNzdWU5NDE4MTkyMDU=","number":2629,"title":"Load datasets from the Hub without requiring a dataset 
script","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-12T08:45:17Z","updated_at":"2021-08-25T14:18:08Z","closed_at":"2021-08-25T14:18:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As a user I would like to be able to upload my csv\/json\/text\/parquet\/etc. 
files in a dataset repository on the Hugging Face Hub and be able to load this dataset with `load_dataset` without having to implement a dataset script.\r\n\r\nMoreover I would like to be able to specify which file goes into which split using the `data_files` argument.\r\n\r\nThis feature should be compatible with private repositories and dataset streaming.\r\n\r\nThis can be implemented by checking the extension of the files in the dataset repository and then by using the right dataset builder that is already packaged in the library (csv\/json\/text\/parquet\/etc.)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2629\/reactions","total_count":11,"+1":0,"-1":0,"laugh":0,"hooray":2,"confused":0,"heart":7,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2629\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2628","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2628\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2628\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2628\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2628","id":941676404,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg3NTE0NzQz","number":2628,"title":"Use ETag of remote data files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-12T05:10:10Z","updated_at":"2021-07-12T14:08:34Z","closed_at":"2021-07-12T08:40:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2628","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2628","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2628.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2628.patch","merged_at":"2021-07-12T08:40:07Z"},"body":"Use ETag of remote data files to create config ID.\r\n\r\nRelated to #2616.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2628\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2628\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2627","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2627\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2627\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2627\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2627","id":941503349,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg3MzczMDg1","number":2627,"title":"Minor fix tests with Windows 
paths","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-11T17:55:48Z","updated_at":"2021-07-12T14:08:47Z","closed_at":"2021-07-12T08:34:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2627","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2627","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2627.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2627.patch","merged_at":"2021-07-12T08:34:50Z"},"body":"Minor fix tests with Windows 
paths.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2627\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2627\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2626","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2626\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2626\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2626\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2626","id":941497830,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg3MzY4OTMz","number":2626,"title":"Use correct logger in metrics.py","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-11T17:22:30Z","updated_at":"2021-07-12T14:08:54Z","closed_at":"2021-07-12T05:54:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2626","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2626","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2626.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2626.patch","merged_at":"2021-07-12T05:54:29Z"},"body":"Fixes #2624 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2626\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2626\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2625","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2625\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2625\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2625\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2625","id":941439922,"node_id":"MDU6SXNzdWU5NDE0Mzk5MjI=","number":2625,"title":"\u269b\ufe0f\ud83d\ude07\u2699\ufe0f\ud83d\udd11","user":{"login":"hustlen0mics","id":50596661,"node_id":"MDQ6VXNlcjUwNTk2NjYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50596661?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hustlen0mics","html_url":"https:\/\/github.com\/hustlen0mics","followers_url":"https:\/\/api.github.com\/users\/hustlen0mics\/followers","following_url":"https:\/\/api.github.com\/users\/hustlen0mics\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hustlen0mics\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hustlen0mics\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hustlen0mics\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hustlen0mics\/orgs","repos_url":"https:\/\/api.github.com\/users\/hustlen0mics\/repos","events_url":"https:\/\/api.github.com\/users\/hustlen0mics\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hustlen0mics\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-11T12:14:34Z","updated_at":"2021-07-12T05:55:59Z","closed_at":"2021-07-12T05:55:59Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2625\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2625\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2624","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2624\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2624\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2624\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2624","id":941318247,"node_id":"MDU6SXNzdWU5NDEzMTgyNDc=","number":2624,"title":"can't set verbosity for 
`metric.py`","user":{"login":"thomas-happify","id":66082334,"node_id":"MDQ6VXNlcjY2MDgyMzM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/66082334?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomas-happify","html_url":"https:\/\/github.com\/thomas-happify","followers_url":"https:\/\/api.github.com\/users\/thomas-happify\/followers","following_url":"https:\/\/api.github.com\/users\/thomas-happify\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomas-happify\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomas-happify\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomas-happify\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomas-happify\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomas-happify\/repos","events_url":"https:\/\/api.github.com\/users\/thomas-happify\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomas-happify\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-10T20:23:45Z","updated_at":"2021-07-12T05:54:29Z","closed_at":"2021-07-12T05:54:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n```\r\n[2021-07-10 20:13:11,528][datasets.utils.filelock][INFO] - Lock 139705371374976 acquired on \/root\/.cache\/huggingface\/metrics\/seqeval\/default\/default_experiment-1-0.arrow.lock\r\n[2021-07-10 20:13:11,529][datasets.arrow_writer][INFO] - Done writing 32 examples in 6100 bytes \/root\/.cache\/huggingface\/metrics\/seqeval\/default\/default_experiment-1-0.arrow.\r\n[2021-07-10 20:13:11,531][datasets.arrow_dataset][INFO] - Set __getitem__(key) output type to python objects for no columns (when key is int or slice) and don't output other (un-formatted) columns.\r\n[2021-07-10 20:13:11,543][\/conda\/envs\/myenv\/lib\/python3.8\/site-packages\/datasets\/metric.py][INFO] - Removing \/root\/.cache\/huggingface\/metrics\/seqeval\/default\/default_experiment-1-0.arrow\r\n```\r\nAs you can see, `datasets` logging come from different places. \r\n`filelock`, `arrow_writer` & `arrow_dataset` comes from `datasets.*` which are expected \r\nHowever, `metric.py` logging comes from `\/conda\/envs\/myenv\/lib\/python3.8\/site-packages\/datasets\/`\r\n\r\nSo when setting `datasets.utils.logging.set_verbosity_error()`, it still logs the last message which is annoying during evaluation. 
\r\n\r\nI had to do \r\n```\r\nlogging.getLogger(\"\/conda\/envs\/myenv\/lib\/python3.8\/site-packages\/datasets\/metric\").setLevel(logging.ERROR)\r\n``` \r\nto fully mute these messages\r\n\r\n## Expected results\r\nit shouldn't log these messages when setting `datasets.utils.logging.set_verbosity_error()`\r\n\r\n## Environment info\r\n\r\n- `datasets` version: tried both 1.8.0 & 1.9.0\r\n- Platform: Ubuntu 18.04.5 LTS \r\n- Python version: 3.8.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2624\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2624\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2623","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2623\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2623\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2623\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2623","id":941265342,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg3MTk0MjM3","number":2623,"title":"[Metrics] added wiki_split metrics","user":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patr
ickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-10T14:51:50Z","updated_at":"2021-07-14T14:28:13Z","closed_at":"2021-07-12T22:34:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2623","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2623","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2623.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2623.patch","merged_at":"2021-07-12T22:34:31Z"},"body":"Fixes: #2606\r\n\r\nThis pull request adds combined metrics for the wiki_split (English sentence split) task.\r\n\r\nReviewer: @patrickvonplaten ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2623\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2623\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2622","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2622\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2622\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2622\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2622","id":941127785,"node_id":"MDU6SXNzdWU5NDExMjc3ODU=","number":2622,"title":"Integration with 
AugLy","user":{"login":"Darktex","id":890615,"node_id":"MDQ6VXNlcjg5MDYxNQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/890615?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Darktex","html_url":"https:\/\/github.com\/Darktex","followers_url":"https:\/\/api.github.com\/users\/Darktex\/followers","following_url":"https:\/\/api.github.com\/users\/Darktex\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Darktex\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Darktex\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Darktex\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Darktex\/orgs","repos_url":"https:\/\/api.github.com\/users\/Darktex\/repos","events_url":"https:\/\/api.github.com\/users\/Darktex\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Darktex\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-10T00:03:09Z","updated_at":"2021-07-11T17:08:11Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nFacebook recently launched a library, [AugLy](https:\/\/github.com\/facebookresearch\/AugLy) , that has a unified API for augmentations for image, video and text.\r\n\r\nIt would be pretty exciting to have it hooked up to HF libraries so that we can make NLP models robust to misspellings or to punctuation, or emojis etc. Plus, with Transformers supporting more CV use cases, having augmentations support becomes crucial.\r\n\r\n**Describe the solution you'd like**\r\nThe biggest difference between augmentations and preprocessing is that preprocessing happens only once, but you are running augmentations once per epoch. 
AugLy operates on text directly, so this breaks the typical workflow where we would run the tokenizer once, set format to pt tensors and be ready for the DataLoader.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nOne possible way of implementing this is to make a custom Dataset class where `__getitem__(i)` runs the augmentation and the tokenizer every time, though this would slow training down considerably given we wouldn't even run the tokenizer in batches.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2622\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2622\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2621","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2621\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2621\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2621\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2621","id":940916446,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2OTE1Mzcw","number":2621,"title":"Use prefix to allow exceed Windows MAX_PATH","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-07-09T16:39:53Z","updated_at":"2021-07-16T15:28:12Z","closed_at":"2021-07-16T15:28:11Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2621","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2621","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2621.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2621.patch","merged_at":"2021-07-16T15:28:11Z"},"body":"By using this prefix, you can exceed the Windows MAX_PATH limit.\r\n\r\nSee: https:\/\/docs.microsoft.com\/en-us\/windows\/win32\/fileio\/naming-a-file?redirectedfrom=MSDN#win32-file-namespaces\r\n\r\nRelated to #2524, 
#2220.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2621\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2621\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2620","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2620\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2620\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2620\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2620","id":940893389,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2ODk3MDky","number":2620,"title":"Add speech processing tasks","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-09T16:07:29Z","updated_at":"2021-07-12T18:32:59Z","closed_at":"2021-07-12T17:32:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2620","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2620","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2620.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2620.patch","merged_at":"2021-07-12T17:32:02Z"},"body":"This PR replaces the `automatic-speech-recognition` task category with a broader `speech-processing` category. 
\r\n\r\nThe tasks associated with this category are derived from the [SUPERB benchmark](https:\/\/arxiv.org\/abs\/2105.01051), and ASR is included in this set.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2620\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2620\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2619","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2619\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2619\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2619\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2619","id":940858236,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2ODY3NDA4","number":2619,"title":"Add ASR task for SUPERB","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":3,"created_at":"2021-07-09T15:19:45Z","updated_at":"2021-07-15T08:55:58Z","closed_at":"2021-07-13T12:40:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2619","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2619","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2619.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2619.patch","merged_at":"2021-07-13T12:40:18Z"},"body":"This PR starts building up the SUPERB benchmark by including the ASR task as described in the [SUPERB paper](https:\/\/arxiv.org\/abs\/2105.01051) and `s3prl` [instructions](https:\/\/github.com\/s3prl\/s3prl\/tree\/v0.2.0\/downstream#asr-automatic-speech-recognition).\r\n\r\nUsage:\r\n\r\n```python\r\nfrom datasets import load_dataset \r\n\r\nasr = load_dataset(\"superb\", \"asr\")\r\n# DatasetDict({\r\n# train: Dataset({\r\n# features: ['file', 'text', 'speaker_id', 'chapter_id', 'id'],\r\n# num_rows: 28539\r\n# })\r\n# validation: Dataset({\r\n# features: ['file', 'text', 'speaker_id', 'chapter_id', 'id'],\r\n# num_rows: 2703\r\n# })\r\n# test: Dataset({\r\n# features: ['file', 'text', 'speaker_id', 'chapter_id', 'id'],\r\n# num_rows: 2620\r\n# })\r\n# })\r\n```\r\n\r\nI've used the GLUE benchmark as a guide for filling out the README.\r\n\r\nTo move fast during the evaluation PoC I propose to merge one task at a time, so we can continue building the training \/ evaluation framework in parallel.\r\n\r\nNote: codewise this PR is ready for review - I'll add the missing YAML tags once #2620 is merged :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2619\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2619\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2618","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2618\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2618\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2618\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2618","id":940852640,"node_id":"MDU6SXNzdWU5NDA4NTI2NDA=","number":2618,"title":"`filelock.py` Error","user":{"login":"liyucheng09","id":27999909,"node_id":"MDQ6VXNlcjI3OTk5OTA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27999909?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/liyucheng09","html_url":"https:\/\/github.com\/liyucheng09","followers_url":"https:\/\/api.github.com\/users\/liyucheng09\/followers","following_url":"https:\/\/api.github.com\/users\/liyucheng09\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/liyucheng09\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/liyucheng09\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/liyucheng09\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/liyucheng09\/orgs","repos_url":"https:\/\/api.github.com\/users\/liyucheng09\/repos","events_url":"https:\/\/api.github.com\/users\/liyucheng09\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/liyucheng09\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-09T15:12:49Z","updated_at":"2021-07-12T06:20:30Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nIt seems that the `filelock.py` went error. 
\r\n\r\n```\r\n>>> ds=load_dataset('xsum')\r\n\r\n^CTraceback (most recent call last):\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/utils\/filelock.py\", line 402, in _acquire\r\n fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)\r\nOSError: [Errno 37] No locks available\r\n```\r\n\r\nAccording to error log, it is OSError, but there is an `except` in the `_acquire` function.\r\n\r\n```\r\n def _acquire(self):\r\n open_mode = os.O_WRONLY | os.O_CREAT | os.O_EXCL | os.O_TRUNC\r\n try:\r\n fd = os.open(self._lock_file, open_mode)\r\n except (IOError, OSError):\r\n pass\r\n else:\r\n self._lock_file_fd = fd\r\n return None\r\n```\r\n\r\nI don't know why it stucked rather than `pass` directly.\r\n\r\nI am not quite familiar with filelock operation, so any help is highly appriciated.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n\r\nds = load_dataset('xsum')\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\n```\r\n>>> ds=load_dataset('xsum')\r\n\r\n^CTraceback (most recent call last):\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/utils\/filelock.py\", line 402, in _acquire\r\n fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)\r\nOSError: [Errno 37] No locks available\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 818, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 470, in prepare_module\r\n with FileLock(lock_path):\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/utils\/filelock.py\", line 323, in __enter__\r\n self.acquire()\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/utils\/filelock.py\", line 272, in acquire\r\n self._acquire()\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/utils\/filelock.py\", line 402, in _acquire\r\n fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)\r\nKeyboardInterrupt\r\n```\r\n\r\n## Environment info\r\n\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-4.15.0-135-generic-x86_64-with-debian-buster-sid\r\n- Python version: 3.6.13\r\n- PyArrow version: 4.0.1\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2618\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2618\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2617","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2617\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2617\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2617\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2617","id":940846847,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2ODU3NzQz","number":2617,"title":"Fix missing EOL issue in to_json for old versions of 
pandas","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-09T15:05:45Z","updated_at":"2021-07-12T14:09:00Z","closed_at":"2021-07-09T15:28:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2617","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2617","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2617.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2617.patch","merged_at":"2021-07-09T15:28:33Z"},"body":"Some versions of pandas don't add an EOL at the end of the output of `to_json`.\r\nTherefore users could end up having two samples in the same line\r\n\r\nClose 
https:\/\/github.com\/huggingface\/datasets\/issues\/2615","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2617\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2617\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2616","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2616\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2616\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2616\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2616","id":940799038,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2ODE3NjYz","number":2616,"title":"Support remote data files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":2,"created_at":"2021-07-09T14:07:38Z","updated_at":"2021-07-09T16:13:41Z","closed_at":"2021-07-09T16:13:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2616","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2616","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2616.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2616.patch","merged_at":"2021-07-09T16:13:41Z"},"body":"Add support for (streaming) remote data files:\r\n\r\n```python\r\ndata_files = f\"https:\/\/huggingface.co\/datasets\/{repo_id}\/resolve\/main\/{relative_file_path}\"\r\nds = load_dataset(\"json\", split=\"train\", data_files=data_files, streaming=True)\r\n```\r\n\r\ncc: @thomwolf ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2616\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2616\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2615","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2615\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2615\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2615\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2615","id":940794339,"node_id":"MDU6SXNzdWU5NDA3OTQzMzk=","number":2615,"title":"Jsonlines export 
error","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id"
:"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":10,"created_at":"2021-07-09T14:02:05Z","updated_at":"2021-07-09T15:29:07Z","closed_at":"2021-07-09T15:28:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen exporting large datasets in jsonlines (c4 in my case) the created file has an error every 9999 lines: the 9999th and 10000th are concatenated, thus breaking the jsonlines format. This sounds like it is related to batching, which is by 10000 by default\r\n\r\n## Steps to reproduce the bug\r\nThis what I'm running:\r\n\r\nin python:\r\n\r\n```\r\nfrom datasets import load_dataset\r\nptb = load_dataset(\"ptb_text_only\")\r\nptb[\"train\"].to_json(\"ptb.jsonl\")\r\n```\r\n\r\nthen out of python:\r\n\r\n```\r\nhead -10000 ptb.jsonl\r\n```\r\n\r\n## Expected results\r\nProperly separated lines\r\n\r\n## Actual results\r\nThe last line is a concatenation of two lines\r\n\r\n## Environment info\r\n\r\n\r\n- `datasets` version: 1.9.1.dev0\r\n- Platform: Linux-5.4.0-1046-gcp-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.6.9\r\n- PyArrow version: 4.0.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2615\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2615\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2614","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2614\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2614\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2614\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2614","id":940762427,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2Nzg2NTg3","number":2614,"title":"Convert numpy scalar to python float in Pearsonr 
output","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-09T13:22:55Z","updated_at":"2021-07-12T14:13:02Z","closed_at":"2021-07-09T14:04:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2614","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2614","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2614.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2614.patch","merged_at":"2021-07-09T14:04:38Z"},"body":"Following of 
https:\/\/github.com\/huggingface\/datasets\/pull\/2612","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2614\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2614\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2613","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2613\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2613\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2613\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2613","id":940759852,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2Nzg0MzY0","number":2613,"title":"Use ndarray.item instead of ndarray.tolist","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-09T13:19:35Z","updated_at":"2021-07-12T14:12:57Z","closed_at":"2021-07-09T13:50:05Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2613","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2613","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2613.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2613.patch","merged_at":"2021-07-09T13:50:05Z"},"body":"This PR follows up on #2612 to use `numpy.ndarray.item` instead of `numpy.ndarray.tolist` as the latter is somewhat confusing to the developer (even though it works).\r\n\r\nJudging from the `numpy` docs, `ndarray.item` is closer to what we want: https:\/\/numpy.org\/doc\/stable\/reference\/generated\/numpy.ndarray.item.html#numpy-ndarray-item\r\n\r\nPS. Sorry for the duplicate work here. 
I should have read the numpy docs more carefully in #2612 \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2613\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2613\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2612","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2612\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2612\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2612\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2612","id":940604512,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2NjUwMjk3","number":2612,"title":"Return Python float instead of numpy.float64 in sklearn metrics","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":3,"created_at":"2021-07-09T09:48:09Z","updated_at":"2021-07-12T14:12:53Z","closed_at":"2021-07-09T13:03:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2612","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2612","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2612.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2612.patch","merged_at":"2021-07-09T13:03:54Z"},"body":"This PR converts the return type of all `sklearn` metrics to be Python `float` instead of `numpy.float64`.\r\n\r\nThe reason behind this is that our Hub evaluation framework relies on converting benchmark-specific metrics to YAML ([example](https:\/\/huggingface.co\/datasets\/autonlp\/autonlp-benchmark-raft-neelalex__raft-test-neelalex__raft-predictions-3\/blob\/main\/README.md#L11)) and the `numpy.float64` format produces garbage like:\r\n\r\n```python\r\nimport yaml\r\nfrom datasets import load_metric\r\n\r\nmetric = load_metric(\"accuracy\")\r\nscore = metric.compute(predictions=[0,1], references=[0,1])\r\nprint(yaml.dump(score[\"accuracy\"])) # output below\r\n# !!python\/object\/apply:numpy.core.multiarray.scalar\r\n# - !!python\/object\/apply:numpy.dtype\r\n# args:\r\n# - f8\r\n# - false\r\n# - true\r\n# state: !!python\/tuple\r\n# - 3\r\n# - <\r\n# - null\r\n# - null\r\n# - null\r\n# - -1\r\n# - -1\r\n# - 0\r\n# - !!binary |\r\n# AAAAAAAA8D8=\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2612\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2612\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2611","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2611\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2611\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2611\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2611","id":940307053,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2Mzk5MjU3","number":2611,"title":"More consistent naming","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-09T00:09:17Z","updated_at":"2021-07-13T17:13:19Z","closed_at":"2021-07-13T16:08:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2611","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2611","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2611.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2611.patch","merged_at":"2021-07-13T16:08:30Z"},"body":"As per @stas00's suggestion in #2500, this PR inserts a space between the logo and the lib name (`\ud83e\udd17Datasets` -> `\ud83e\udd17 Datasets`) for consistency with the Transformers lib. 
Additionally, more consistent names are used for Datasets Hub, etc.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2611\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2611\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2610","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2610\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2610\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2610\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2610","id":939899829,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2MDUwMzI5","number":2610,"title":"Add missing WikiANN language tags","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-08T14:08:01Z","updated_at":"2021-07-12T14:12:16Z","closed_at":"2021-07-08T15:44:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2610","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2610","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2610.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2610.patch","merged_at":"2021-07-08T15:44:04Z"},"body":"Add missing language tags for WikiANN datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2610\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2610\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2609","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2609\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2609\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2609\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2609","id":939616682,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg1ODA3MTMz","number":2609,"title":"Fix potential 
DuplicatedKeysError","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":1,"created_at":"2021-07-08T08:38:04Z","updated_at":"2021-07-12T14:13:16Z","closed_at":"2021-07-09T16:42:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2609","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2609","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2609.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2609.patch","merged_at":"2021-07-09T16:42:08Z"},"body":"Fix potential DiplicatedKeysError by ensuring keys are unique.\r\n\r\nWe should promote as a good practice, that the keys should be programmatically generated as unique, instead of read from data (which might be not 
unique).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2609\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2609\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2608","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2608\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2608\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2608\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2608","id":938897626,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg1MjAwMDYw","number":2608,"title":"Support streaming JSON files ","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-07T13:30:22Z","updated_at":"2021-07-12T14:12:31Z","closed_at":"2021-07-08T16:08:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2608","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2608","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2608.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2608.patch","merged_at":"2021-07-08T16:08:40Z"},"body":"Use open in JSON dataset builder, so that it can be patched with xopen for streaming.\r\n\r\nClose #2607.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2608\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2608\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2607","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2607\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2607\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2607\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2607","id":938796902,"node_id":"MDU6SXNzdWU5Mzg3OTY5MDI=","number":2607,"title":"Streaming local gzip compressed JSON line files is not 
working","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-07-07T11:36:33Z","updated_at":"2021-07-20T09:50:19Z","closed_at":"2021-07-08T16:08:41Z","author_association":"MEMBER","active_lock_reason
":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nUsing streaming to iterate on local gzip compressed JSON files raise a file not exist error\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nstreamed_dataset = load_dataset('json', split='train', data_files=data_files, streaming=True)\r\n\r\nnext(iter(streamed_dataset))\r\n```\r\n\r\n## Actual results\r\n```\r\nFileNotFoundError Traceback (most recent call last)\r\n in \r\n----> 1 next(iter(streamed_dataset))\r\n\r\n~\/Documents\/GitHub\/datasets\/src\/datasets\/iterable_dataset.py in __iter__(self)\r\n 336 \r\n 337 def __iter__(self):\r\n--> 338 for key, example in self._iter():\r\n 339 if self.features:\r\n 340 # we encode the example for ClassLabel feature types for example\r\n\r\n~\/Documents\/GitHub\/datasets\/src\/datasets\/iterable_dataset.py in _iter(self)\r\n 333 else:\r\n 334 ex_iterable = self._ex_iterable\r\n--> 335 yield from ex_iterable\r\n 336 \r\n 337 def __iter__(self):\r\n\r\n~\/Documents\/GitHub\/datasets\/src\/datasets\/iterable_dataset.py in __iter__(self)\r\n 76 \r\n 77 def __iter__(self):\r\n---> 78 for key, example in self.generate_examples_fn(**self.kwargs):\r\n 79 yield key, example\r\n 80 \r\n\r\n~\/Documents\/GitHub\/datasets\/src\/datasets\/iterable_dataset.py in wrapper(**kwargs)\r\n 282 def wrapper(**kwargs):\r\n 283 python_formatter = PythonFormatter()\r\n--> 284 for key, table in generate_tables_fn(**kwargs):\r\n 285 batch = python_formatter.format_batch(table)\r\n 286 for i, example in enumerate(_batch_to_examples(batch)):\r\n\r\n~\/Documents\/GitHub\/datasets\/src\/datasets\/packaged_modules\/json\/json.py in _generate_tables(self, files, original_files)\r\n 85 file,\r\n 86 read_options=self.config.pa_read_options,\r\n---> 87 parse_options=self.config.pa_parse_options,\r\n 88 )\r\n 89 except pa.ArrowInvalid as err:\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/_json.pyx in pyarrow._json.read_json()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/_json.pyx in pyarrow._json._get_reader()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/io.pxi in pyarrow.lib.get_input_stream()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/io.pxi in pyarrow.lib.get_native_file()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/io.pxi in pyarrow.lib.OSFile.__cinit__()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/io.pxi in pyarrow.lib.OSFile._open_readable()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nFileNotFoundError: [Errno 2] Failed to open local file 'gzip:\/\/file-000000000000.json::\/Users\/thomwolf\/github-dataset\/file-000000000000.json.gz'. 
Detail: [errno 2] No such file or directory\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.9.1.dev0\r\n- Platform: Darwin-19.6.0-x86_64-i386-64bit\r\n- Python version: 3.7.7\r\n- PyArrow version: 1.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2607\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2607\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2606","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2606\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2606\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2606\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2606","id":938763684,"node_id":"MDU6SXNzdWU5Mzg3NjM2ODQ=","number":2606,"title":"[Metrics] addition of wiki_split metrics","user":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2459308248,"node_id":"MDU6TGFiZWwyNDU5MzA4MjQ4","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20request","name":"metric request","color":"d4c5f9","default":false,"description":"Requesting to add a new 
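For reference, the behaviour the report above asks for, lazily streaming a local gzip-compressed JSON Lines file, can be sketched with the standard library alone; this is illustrative only and not how `datasets`/PyArrow read the file:

```python
# Illustrative sketch: iterate a local gzip-compressed JSON Lines file lazily,
# one example at a time, without extracting it to disk first.
import gzip
import json

def iter_gzip_jsonl(path):
    with gzip.open(path, "rt") as f:
        for line in f:
            line = line.strip()
            if line:
                yield json.loads(line)

# Usage, with the file name taken from the traceback above:
# next(iter_gzip_jsonl("file-000000000000.json.gz"))
```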
metric"}],"state":"closed","locked":false,"assignee":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"assignees":[{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-07T10:56:04Z","updated_at":"2021-07-12T22:34:31Z","closed_at":"2021-07-12T22:34:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nWhile training the model on sentence split the task in English we require to evaluate the trained model on `Exact Match`, `SARI` and `BLEU` score\r\nlike this \r\n![image](https:\/\/user-images.githubusercontent.com\/26653468\/124746876-ff5a3380-df3e-11eb-9a01-4b48db7a6694.png)\r\nWhile training we require metrics which can give all the output\r\n\r\nCurrently, we don't have an exact match for text normalized data\r\n\r\n**Describe the solution you'd like**\r\nA custom metrics for wiki_split that can calculate these three values and provide it in the form of a single dictionary\r\nFor exact match, we can refer to [this](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/src\/transformers\/data\/metrics\/squad_metrics.py) \r\n\r\n**Describe alternatives you've considered**\r\nTwo metrics are already present one more can be added for an exact match then we can run all three metrics in training script\r\n\r\n#self-assign","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2606\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2606\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2605","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2605\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2605\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2605\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2605","id":938648164,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0OTkyODIz","number":2605,"title":"Make any ClientError trigger retry in streaming mode (e.g. 
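A minimal sketch of the exact-match component requested in #2606, returned as a dict so it could sit next to SARI and BLEU scores; the normalisation rules below are an assumption, not those of the squad_metrics reference linked in the issue:

```python
# Sketch of an exact-match metric over lightly normalised text.
# Normalisation choices (lowercase, strip punctuation, collapse spaces) are
# assumptions for illustration only.
import re
import string

def normalize(text):
    text = text.lower()
    text = text.translate(str.maketrans("", "", string.punctuation))
    return re.sub(r"\s+", " ", text).strip()

def exact_match(predictions, references):
    hits = sum(normalize(p) == normalize(r) for p, r in zip(predictions, references))
    return {"exact_match": 100.0 * hits / len(predictions)}

print(exact_match(["The cat sat."], ["the cat sat"]))  # {'exact_match': 100.0}
```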
ClientOSError)","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-07T08:47:23Z","updated_at":"2021-07-12T14:10:27Z","closed_at":"2021-07-07T08:59:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2605","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2605","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2605.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2605.patch","merged_at":"2021-07-07T08:59:13Z"},"body":"During the FLAX sprint some users have this error when streaming datasets:\r\n```python\r\naiohttp.client_exceptions.ClientOSError: [Errno 104] Connection reset by peer\r\n```\r\nThis error must trigger a retry instead of directly crashing\r\n\r\nTherefore I extended the error type that triggers the retry to be the base aiohttp 
error type: `ClientError`\r\nIn particular both `ClientOSError` and `ServerDisconnectedError` inherit from `ClientError`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2605\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2605\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2604","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2604\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2604\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2604\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2604","id":938602237,"node_id":"MDU6SXNzdWU5Mzg2MDIyMzc=","number":2604,"title":"Add option to delete temporary files (e.g. extracted files) when loading dataset","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
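The retry policy described in #2605 can be illustrated with a small wrapper that retries on any `aiohttp.ClientError`, which covers both `ClientOSError` and `ServerDisconnectedError`; the retry count and backoff below are arbitrary illustrative choices, not the library's values:

```python
# Sketch: any aiohttp ClientError triggers another attempt instead of crashing.
import asyncio
import aiohttp

async def fetch_with_retry(url, max_retries=3, backoff=1.0):
    for attempt in range(max_retries):
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(url) as resp:
                    return await resp.read()
        except aiohttp.ClientError:
            if attempt == max_retries - 1:
                raise  # give up after the last attempt
            await asyncio.sleep(backoff * (attempt + 1))

# asyncio.run(fetch_with_retry("https://huggingface.co"))
```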
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":14,"created_at":"2021-07-07T07:56:16Z","updated_at":"2021-07-19T09:08:18Z","closed_at":"2021-07-19T09:08:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm loading a dataset constituted of 44 GB of compressed JSON files.\r\n\r\nWhen loading the dataset with the JSON script, extracting the files create about 200 GB of uncompressed files before creating the 180GB of arrow cache tables\r\n\r\nHaving a simple way to delete the extracted files after usage (or even better, to stream extraction\/delete) would be nice to avoid disk cluter.\r\n\r\nI can maybe tackle this one in the JSON script unless you want a more general solution.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2604\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2604\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2603","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2603\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2603\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2603\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2603","id":938588149,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0OTQ0ODcz","number":2603,"title":"Fix DuplicatedKeysError in 
omp","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-07T07:38:32Z","updated_at":"2021-07-12T14:10:41Z","closed_at":"2021-07-07T12:56:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2603","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2603","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2603.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2603.patch","merged_at":"2021-07-07T12:56:35Z"},"body":"Close 
#2598.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2603\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2603\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2602","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2602\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2602\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2602\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2602","id":938555712,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0OTE5MjMy","number":2602,"title":"Remove import of transformers","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-07T06:58:18Z","updated_at":"2021-07-12T14:10:22Z","closed_at":"2021-07-07T08:28:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2602","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2602","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2602.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2602.patch","merged_at":"2021-07-07T08:28:51Z"},"body":"When pickling a tokenizer within multiprocessing, check that is instance of transformers PreTrainedTokenizerBase without importing transformers.\r\n\r\nRelated to huggingface\/transformers#12549 and #502.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2602\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2602\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2601","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2601\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2601\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2601\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2601","id":938096396,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0NTQyNjY5","number":2601,"title":"Fix `filter` with multiprocessing in case all samples are 
discarded","user":{"login":"mxschmdt","id":4904985,"node_id":"MDQ6VXNlcjQ5MDQ5ODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4904985?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mxschmdt","html_url":"https:\/\/github.com\/mxschmdt","followers_url":"https:\/\/api.github.com\/users\/mxschmdt\/followers","following_url":"https:\/\/api.github.com\/users\/mxschmdt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mxschmdt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mxschmdt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mxschmdt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mxschmdt\/orgs","repos_url":"https:\/\/api.github.com\/users\/mxschmdt\/repos","events_url":"https:\/\/api.github.com\/users\/mxschmdt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mxschmdt\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-06T17:06:28Z","updated_at":"2021-07-12T14:10:35Z","closed_at":"2021-07-07T12:50:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2601","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2601","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2601.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2601.patch","merged_at":"2021-07-07T12:50:31Z"},"body":"Fixes #2600 \r\n\r\nAlso I moved the check for `num_proc` larger than dataset size added in #2566 up so that multiprocessing is not used with one 
process.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2601\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2601\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2600","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2600\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2600\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2600\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2600","id":938086745,"node_id":"MDU6SXNzdWU5MzgwODY3NDU=","number":2600,"title":"Crash when using multiprocessing (`num_proc` > 1) on `filter` and all samples are discarded","user":{"login":"mxschmdt","id":4904985,"node_id":"MDQ6VXNlcjQ5MDQ5ODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4904985?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mxschmdt","html_url":"https:\/\/github.com\/mxschmdt","followers_url":"https:\/\/api.github.com\/users\/mxschmdt\/followers","following_url":"https:\/\/api.github.com\/users\/mxschmdt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mxschmdt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mxschmdt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mxschmdt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mxschmdt\/orgs","repos_url":"https:\/\/api.github.com\/users\/mxschmdt\/repos","events_url":"https:\/\/api.github.com\/users\/mxschmdt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mxschmdt\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-06T16:53:25Z","updated_at":"2021-07-07T12:50:31Z","closed_at":"2021-07-07T12:50:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIf `filter` is applied to a dataset using multiprocessing (`num_proc` > 1) and all sharded datasets are empty afterwards (due to all samples being discarded), the program crashes.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import Dataset\r\ndata = Dataset.from_dict({'id': [0,1]})\r\ndata.filter(lambda x: False, num_proc=2)\r\n```\r\n\r\n## Expected results\r\nAn empty table should be returned without crashing.\r\n\r\n## Actual results\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 185, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py\", line 397, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 2143, in filter\r\n return self.map(\r\n File 
\"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 1738, in map\r\n result = concatenate_datasets(transformed_shards)\r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 3267, in concatenate_datasets\r\n table = concat_tables(tables_to_concat, axis=axis)\r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/table.py\", line 853, in concat_tables\r\n return ConcatenationTable.from_tables(tables, axis=axis)\r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/table.py\", line 713, in from_tables\r\n blocks = to_blocks(tables[0])\r\nIndexError: list index out of range\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-5.12.11-300.fc34.x86_64-x86_64-with-glibc2.2.5\r\n- Python version: 3.8.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2600\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2600\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2599","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2599\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2599\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2599\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2599","id":937980229,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0NDQ2MTYx","number":2599,"title":"Update processing.rst with other export formats","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-06T14:50:38Z","updated_at":"2021-07-12T14:10:16Z","closed_at":"2021-07-07T08:05:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2599","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2599","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2599.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2599.patch","merged_at":"2021-07-07T08:05:48Z"},"body":"Add other supported export formats than CSV in the docs.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2599\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2599\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2598","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2598\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2598\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2598\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2598","id":937930632,"node_id":"MDU6SXNzdWU5Mzc5MzA2MzI=","number":2598,"title":"Unable to download omp 
dataset","user":{"login":"erikadistefano","id":25797960,"node_id":"MDQ6VXNlcjI1Nzk3OTYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25797960?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/erikadistefano","html_url":"https:\/\/github.com\/erikadistefano","followers_url":"https:\/\/api.github.com\/users\/erikadistefano\/followers","following_url":"https:\/\/api.github.com\/users\/erikadistefano\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/erikadistefano\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/erikadistefano\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/erikadistefano\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/erikadistefano\/orgs","repos_url":"https:\/\/api.github.com\/users\/erikadistefano\/repos","events_url":"https:\/\/api.github.com\/users\/erikadistefano\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/erikadistefano\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-06T14:00:52Z","updated_at":"2021-07-07T12:56:35Z","closed_at
":"2021-07-07T12:56:35Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe omp dataset cannot be downloaded because of a DuplicatedKeysError\r\n\r\n## Steps to reproduce the bug\r\nfrom datasets import load_dataset\r\nomp = load_dataset('omp', 'posts_labeled')\r\nprint(omp)\r\n\r\n## Expected results\r\nThis code should download the omp dataset and print the dictionary\r\n\r\n## Actual results\r\nDownloading and preparing dataset omp\/posts_labeled (download: 1.27 MiB, generated: 13.31 MiB, post-processed: Unknown size, total: 14.58 MiB) to \/home\/erika_distefano\/.cache\/huggingface\/datasets\/omp\/posts_labeled\/1.1.0\/2fe5b067be3bff1d4588d5b0cbb9b5b22ae1b9d5b026a8ff572cd389f862735b...\r\n0 examples [00:00, ? examples\/s]2021-07-06 09:43:55.868815: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcudart.so.11.0\r\nTraceback (most recent call last): \r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 990, in _prepare_split\r\n writer.write(example, key)\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/arrow_writer.py\", line 338, in write\r\n self.check_duplicate_keys()\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/arrow_writer.py\", line 349, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 3326\r\nKeys should be unique and deterministic in nature\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"hf_datasets.py\", line 32, in \r\n omp = load_dataset('omp', 'posts_labeled')\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 748, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 992, in _prepare_split\r\n num_examples, num_bytes = writer.finalize()\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/arrow_writer.py\", line 409, in finalize\r\n self.check_duplicate_keys()\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/arrow_writer.py\", line 349, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 3326\r\nKeys should be unique and deterministic in nature\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.0\r\n- Platform: Ubuntu 18.04.4 LTS\r\n- Python version: 3.6.9\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2598\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2598\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2597","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2597\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2597\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2597\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2597","id":937917770,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0Mzk0MDIz","number":2597,"title":"Remove redundant prepare_module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-06T13:47:45Z","updated_at":"2021-07-12T14:10:52Z","closed_at":"2021-07-07T13:01:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2597","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2597","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2597.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2597.patch","merged_at":"2021-07-07T13:01:46Z"},"body":"I have noticed that after implementing `load_dataset_builder` (#2500), there is a redundant call to `prepare_module`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2597\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2597\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2596","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2596\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2596\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2596\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2596","id":937598914,"node_id":"MDU6SXNzdWU5Mzc1OTg5MTQ=","number":2596,"title":"Transformer Class on 
dataset","user":{"login":"arita37","id":18707623,"node_id":"MDQ6VXNlcjE4NzA3NjIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18707623?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arita37","html_url":"https:\/\/github.com\/arita37","followers_url":"https:\/\/api.github.com\/users\/arita37\/followers","following_url":"https:\/\/api.github.com\/users\/arita37\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arita37\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arita37\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arita37\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arita37\/orgs","repos_url":"https:\/\/api.github.com\/users\/arita37\/repos","events_url":"https:\/\/api.github.com\/users\/arita37\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arita37\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-07-06T07:27:15Z","updated_at":"2021-07-08T08:22:05Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Just wondering if you have intenttion to create\r\n\r\nTransformerClass :\r\n dataset --> dataset\r\n\r\nand make determnistic transformation (ie not fit).\r\n\r\n\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2596\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2596\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2595","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2595\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2595\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2595\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2595","id":937483120,"node_id":"MDU6SXNzdWU5Mzc0ODMxMjA=","number":2595,"title":"ModuleNotFoundError: No module named 'datasets.tasks' while importing common voice 
datasets","user":{"login":"profsatwinder","id":41314912,"node_id":"MDQ6VXNlcjQxMzE0OTEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/41314912?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/profsatwinder","html_url":"https:\/\/github.com\/profsatwinder","followers_url":"https:\/\/api.github.com\/users\/profsatwinder\/followers","following_url":"https:\/\/api.github.com\/users\/profsatwinder\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/profsatwinder\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/profsatwinder\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/profsatwinder\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/profsatwinder\/orgs","repos_url":"https:\/\/api.github.com\/users\/profsatwinder\/repos","events_url":"https:\/\/api.github.com\/users\/profsatwinder\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/profsatwinder\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-06T03:20:55Z","updated_at":"2021-07-06T05:59:49Z","closed_at":"2021-07-06T05:59:49Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Error traceback:\r\n---------------------------------------------------------------------------\r\nModuleNotFoundError Traceback (most recent call last)\r\n in ()\r\n 1 from datasets import load_dataset, load_metric\r\n 2 \r\n----> 3 common_voice_train = load_dataset(\"common_voice\", \"pa-IN\", split=\"train+validation\")\r\n 4 common_voice_test = load_dataset(\"common_voice\", \"pa-IN\", split=\"test\")\r\n\r\n9 frames\r\n\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/common_voice\/078d412587e9efeb0ae2e574da99c31e18844c496008d53dc5c60f4159ed639b\/common_voice.py in ()\r\n 19 \r\n 20 import datasets\r\n---> 21 from datasets.tasks import AutomaticSpeechRecognition\r\n 22 \r\n 23 \r\n\r\nModuleNotFoundError: No module named 'datasets.tasks'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2595\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2595\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2594","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2594\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2594\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2594\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2594","id":937294772,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzODc0NjIz","number":2594,"title":"Fix BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-05T18:24:10Z","updated_at":"2021-07-06T04:59:38Z","closed_at":"2021-07-06T04:59:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2594","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2594","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2594.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2594.patch","merged_at":"2021-07-06T04:59:38Z"},"body":"Fix BibTeX entry.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2594\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2594\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2593","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2593\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2593\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2593\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2593","id":937242137,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzODMwMjcy","number":2593,"title":"Support pandas 1.3.0 
read_csv","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-05T16:40:04Z","updated_at":"2021-07-05T17:14:14Z","closed_at":"2021-07-05T17:14:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2593","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2593","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2593.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2593.patch","merged_at":"2021-07-05T17:14:14Z"},"body":"Workaround for this issue in pandas 1.3.0 : https:\/\/github.com\/pandas-dev\/pandas\/issues\/42387\r\n\r\nThe csv reader raises an error:\r\n```python\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pandas\/io\/parsers\/readers.py in _refine_defaults_read(dialect, delimiter, delim_whitespace, engine, sep, error_bad_lines, warn_bad_lines, on_bad_lines, names, prefix, defaults)\r\n 1304 \r\n 1305 if names is not lib.no_default and prefix is not lib.no_default:\r\n-> 1306 raise ValueError(\"Specified named and prefix; you can only specify one.\")\r\n 1307 \r\n 1308 kwds[\"names\"] = None if names is lib.no_default else names\r\n\r\nValueError: Specified named and prefix; you can only specify one.\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2593\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2593\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2592","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2592\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2592\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2592\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2592","id":937060559,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNjc2MjA4","number":2592,"title":"Add c4.noclean 
infos","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-05T12:51:40Z","updated_at":"2021-07-05T13:15:53Z","closed_at":"2021-07-05T13:15:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2592","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2592","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2592.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2592.patch","merged_at":"2021-07-05T13:15:52Z"},"body":"Adding the data files checksums and the dataset size of the c4.noclean configuration of the C4 dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2592\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2592\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2591","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2591\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2591\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2591\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2591","id":936957975,"node_id":"MDU6SXNzdWU5MzY5NTc5NzU=","number":2591,"title":"Cached dataset overflowing disk 
space","user":{"login":"BirgerMoell","id":1704131,"node_id":"MDQ6VXNlcjE3MDQxMzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1704131?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BirgerMoell","html_url":"https:\/\/github.com\/BirgerMoell","followers_url":"https:\/\/api.github.com\/users\/BirgerMoell\/followers","following_url":"https:\/\/api.github.com\/users\/BirgerMoell\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BirgerMoell\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BirgerMoell\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BirgerMoell\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BirgerMoell\/orgs","repos_url":"https:\/\/api.github.com\/users\/BirgerMoell\/repos","events_url":"https:\/\/api.github.com\/users\/BirgerMoell\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BirgerMoell\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-07-05T10:43:19Z","updated_at":"2021-07-19T09:08:19Z","closed_at":"2021-07-19T09:08:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm training a Swedish Wav2vec2 model on a Linux GPU and having issues that the huggingface cached dataset folder is completely filling up my disk space (I'm training on a dataset of around 500 gb).\r\n\r\nThe cache folder is 500gb (and now my disk space is full).\r\n\r\nIs there a way to toggle caching or set the caching to be stored on a different device (I have another drive with 4 tb that could hold the caching files).\r\n\r\nThis might not technically be a bug, but I was unsure and I felt that the bug was the closest one.\r\n\r\nTraceback (most recent call last):\r\n File \"\/home\/birger\/miniconda3\/envs\/wav2vec2\/lib\/python3.7\/site-packages\/multiprocess\/pool.py\", line 121, in worker\r\n result = (True, func(*args, **kwds))\r\n File \"\/home\/birger\/miniconda3\/envs\/wav2vec2\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 186, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/home\/birger\/miniconda3\/envs\/wav2vec2\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 397, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/home\/birger\/miniconda3\/envs\/wav2vec2\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1983, in _map_single\r\n writer.finalize()\r\n File \"\/home\/birger\/miniconda3\/envs\/wav2vec2\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 418, in finalize\r\n self.pa_writer.close()\r\n File \"pyarrow\/ipc.pxi\", line 402, in pyarrow.lib._CRecordBatchWriter.close\r\n File \"pyarrow\/error.pxi\", line 97, in pyarrow.lib.check_status\r\nOSError: [Errno 28] Error writing bytes to file. 
Detail: [errno 28] No space left on device\r\n\"\"\"\r\n\r\nThe above exception was the direct cause of the following exception:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2591\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2591\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2590","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2590\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2590\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2590\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2590","id":936954348,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNTg1MDg2","number":2590,"title":"Add language tags","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-05T10:39:57Z","updated_at":"2021-07-05T10:58:48Z","closed_at":"2021-07-05T10:58:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2590","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2590","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2590.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2590.patch","merged_at":"2021-07-05T10:58:48Z"},"body":"This PR adds some missing language tags needed for ASR datasets in #2565 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2590\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2590\/timeline","performed_via_github_app":null} 
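For the record directly above (issue 2591, the cache filling the training machine's disk), a minimal hedged sketch of how the `datasets` cache can be pointed at a different drive: globally via the `HF_DATASETS_CACHE` environment variable (set before importing `datasets`), or per call via the `cache_dir` argument of `load_dataset`. The paths and the dataset/config names below are illustrative placeholders, not taken from the issue.

```python
import os

# Option 1 (assumed global setting): redirect the whole datasets cache to a
# larger drive; must be set before `datasets` is imported. Path is a placeholder.
os.environ["HF_DATASETS_CACHE"] = "/mnt/bigdisk/hf_datasets_cache"

from datasets import load_dataset

# Option 2: override the cache location for a single load via `cache_dir`.
# Dataset name, config and split are placeholders for the reporter's setup.
dataset = load_dataset(
    "common_voice",
    "sv-SE",
    split="train",
    cache_dir="/mnt/bigdisk/hf_datasets_cache",
)
```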
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2589","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2589\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2589\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2589\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2589","id":936825060,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNDc0OTQ0","number":2589,"title":"Support multilabel metrics","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":2,"created_at":"2021-07-05T08:19:25Z","updated_at":"2021-07-12T14:12:10Z","closed_at":"2021-07-08T08:40:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2589","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2589","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2589.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2589.patch","merged_at":"2021-07-08T08:40:15Z"},"body":"Currently, multilabel metrics are not supported because `predictions` and `references` are defined as `Value(\"int32\")`.\r\n\r\nThis PR creates a new feature type `OptionalSequence` which can act as either `Value(\"int32\")` or `Sequence(Value(\"int32\"))`, depending on the data passed.\r\n\r\n\r\nClose #2554.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2589\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2589\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2588","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2588\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2588\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2588\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2588","id":936795541,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNDQ5Njky","number":2588,"title":"Fix 
test_is_small_dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-05T07:46:26Z","updated_at":"2021-07-12T14:10:11Z","closed_at":"2021-07-06T17:09:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2588","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2588","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2588.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2588.patch","merged_at":"2021-07-06T17:09:30Z"},"body":"Remove environment variable fixture `env_max_in_memory_dataset_size`. 
This fixture does not work because env variable is read in datasets.config when first loading datasets, and it is never reread during tests.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2588\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2588\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2587","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2587\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2587\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2587\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2587","id":936771339,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNDI5NjQy","number":2587,"title":"Add aiohttp to tests extras require","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-05T07:14:01Z","updated_at":"2021-07-05T09:04:38Z","closed_at":"2021-07-05T09:04:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2587","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2587","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2587.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2587.patch","merged_at":"2021-07-05T09:04:38Z"},"body":"Currently, none of the streaming tests are runned within our CI test suite, because the streaming tests require aiohttp and this is missing from our tests extras require dependencies.\r\n\r\nOur CI test suite should be exhaustive and test all the library functionalities.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2587\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2587\/timeline","performed_via_github_app":null} 
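The body of PR 2588 above explains why an environment-variable fixture has no effect: the value is read once, when `datasets.config` is first imported, and never re-read during the tests. A small self-contained pytest sketch of that pitfall follows; the module and attribute names are hypothetical stand-ins, not the real `datasets` internals.

```python
import os
import types

# Hypothetical stand-in for a config module that reads an env var at import time.
config = types.ModuleType("config")
config.MAX_IN_MEMORY_SIZE = int(os.environ.get("MAX_IN_MEMORY_SIZE", "250"))
INITIAL = config.MAX_IN_MEMORY_SIZE


def test_env_fixture_comes_too_late(monkeypatch):
    # Re-setting the environment variable after "import" changes nothing,
    # because the module never re-reads it.
    monkeypatch.setenv("MAX_IN_MEMORY_SIZE", "500")
    assert config.MAX_IN_MEMORY_SIZE == INITIAL


def test_patching_the_attribute_works(monkeypatch):
    # Patching the cached module attribute is what actually takes effect.
    monkeypatch.setattr(config, "MAX_IN_MEMORY_SIZE", 500)
    assert config.MAX_IN_MEMORY_SIZE == 500
```

Run under pytest, the first test shows why the removed `env_max_in_memory_dataset_size` fixture could never work, and the second shows the attribute-patching route that does.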
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2586","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2586\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2586\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2586\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2586","id":936747588,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNDEwMDU3","number":2586,"title":"Fix misalignment in SQuAD","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-05T06:42:20Z","updated_at":"2021-07-12T14:11:10Z","closed_at":"2021-07-07T13:18:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2586","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2586","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2586.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2586.patch","merged_at":"2021-07-07T13:18:51Z"},"body":"Fix misalignment between:\r\n- the answer text and\r\n- the answer_start within the context\r\n\r\nby keeping original leading blank spaces in the context.\r\n\r\nFix #2585.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2586\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2586\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2585","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2585\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2585\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2585\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2585","id":936484419,"node_id":"MDU6SXNzdWU5MzY0ODQ0MTk=","number":2585,"title":"sqaud_v2 dataset contains misalignment between the answer text and the context value at the answer 
index","user":{"login":"mmajurski","id":9354454,"node_id":"MDQ6VXNlcjkzNTQ0NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9354454?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mmajurski","html_url":"https:\/\/github.com\/mmajurski","followers_url":"https:\/\/api.github.com\/users\/mmajurski\/followers","following_url":"https:\/\/api.github.com\/users\/mmajurski\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mmajurski\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mmajurski\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mmajurski\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mmajurski\/orgs","repos_url":"https:\/\/api.github.com\/users\/mmajurski\/repos","events_url":"https:\/\/api.github.com\/users\/mmajurski\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mmajurski\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-07-04T15:39:49Z","updated_at":"2021-07-07T13:18:51Z","closed_at":"2021-07-07T13:18:51Z","author_association":"NONE","active_loc
k_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe built in huggingface squad_v2 dataset that you can access via datasets.load_dataset contains mis-alignment between the answers['text'] and the characters in the context at the location specified by answers['answer_start'].\r\n\r\nFor example:\r\nid = '56d1f453e7d4791d009025bd'\r\nanswers = {'text': ['Pure Land'], 'answer_start': [146]}\r\nHowever the actual text in context at location 146 is 'ure Land,'\r\nWhich is an off-by-one error from the correct answer.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\n\r\ndef check_context_answer_alignment(example):\r\n for a_idx in range(len(example['answers']['text'])):\r\n # check raw dataset for answer consistency between context and answer\r\n answer_text = example['answers']['text'][a_idx]\r\n a_st_idx = example['answers']['answer_start'][a_idx]\r\n a_end_idx = a_st_idx + len(example['answers']['text'][a_idx])\r\n answer_text_from_context = example['context'][a_st_idx:a_end_idx]\r\n if answer_text != answer_text_from_context:\r\n #print(example['id'])\r\n return False\r\n return True\r\n\r\ndataset = datasets.load_dataset('squad_v2', split='train', keep_in_memory=True)\r\n\r\nstart_len = len(dataset)\r\ndataset = dataset.filter(check_context_answer_alignment,\r\n num_proc=1,\r\n keep_in_memory=True)\r\nend_len = len(dataset)\r\nprint('{} instances contain mis-alignment between the answer text and answer index.'.format(start_len - end_len))\r\n```\r\n\r\n## Expected results\r\nThis code should result in 0 rows being filtered out from the dataset.\r\n\r\n## Actual results\r\nThis filter command results in 258 rows being flagged as containing a discrepancy between the text contained within answers['text'] and the text in example['context'] at the answers['answer_start'] location.\r\n\r\nThis code will reproduce the problem and produce the following count:\r\n\"258 instances contain mis-alignment between the answer text and answer index.\"\r\n\r\n## Environment info\r\nSteps to rebuilt the Conda environment:\r\n```\r\n# create a virtual environment to stuff all these packages into\r\nconda create -n round8 python=3.8 -y\r\n\r\n# activate the virtual environment\r\nconda activate round8\r\n\r\n# install pytorch (best done through conda to handle cuda dependencies)\r\nconda install pytorch torchvision torchtext cudatoolkit=11.1 -c pytorch-lts -c nvidia\r\n\r\npip install jsonpickle transformers datasets matplotlib\r\n```\r\n\r\nOS: Ubuntu 20.04\r\nPython 3.8\r\n\r\nResult of `conda env export`:\r\n```\r\nname: round8\r\nchannels:\r\n - pytorch-lts\r\n - nvidia\r\n - defaults\r\ndependencies:\r\n - _libgcc_mutex=0.1=main\r\n - _openmp_mutex=4.5=1_gnu\r\n - blas=1.0=mkl\r\n - brotlipy=0.7.0=py38h27cfd23_1003\r\n - bzip2=1.0.8=h7b6447c_0\r\n - ca-certificates=2021.5.25=h06a4308_1\r\n - certifi=2021.5.30=py38h06a4308_0\r\n - cffi=1.14.5=py38h261ae71_0\r\n - chardet=4.0.0=py38h06a4308_1003\r\n - cryptography=3.4.7=py38hd23ed53_0\r\n - cudatoolkit=11.1.74=h6bb024c_0\r\n - ffmpeg=4.2.2=h20bf706_0\r\n - freetype=2.10.4=h5ab3b9f_0\r\n - gmp=6.2.1=h2531618_2\r\n - gnutls=3.6.15=he1e5248_0\r\n - idna=2.10=pyhd3eb1b0_0\r\n - intel-openmp=2021.2.0=h06a4308_610\r\n - jpeg=9b=h024ee3a_2\r\n - lame=3.100=h7b6447c_0\r\n - lcms2=2.12=h3be6417_0\r\n - ld_impl_linux-64=2.35.1=h7274673_9\r\n - libffi=3.3=he6710b0_2\r\n - libgcc-ng=9.3.0=h5101ec6_17\r\n - libgomp=9.3.0=h5101ec6_17\r\n - libidn2=2.3.1=h27cfd23_0\r\n - libopus=1.3.1=h7b6447c_0\r\n - 
libpng=1.6.37=hbc83047_0\r\n - libstdcxx-ng=9.3.0=hd4cf53a_17\r\n - libtasn1=4.16.0=h27cfd23_0\r\n - libtiff=4.2.0=h85742a9_0\r\n - libunistring=0.9.10=h27cfd23_0\r\n - libuv=1.40.0=h7b6447c_0\r\n - libvpx=1.7.0=h439df22_0\r\n - libwebp-base=1.2.0=h27cfd23_0\r\n - lz4-c=1.9.3=h2531618_0\r\n - mkl=2021.2.0=h06a4308_296\r\n - mkl-service=2.3.0=py38h27cfd23_1\r\n - mkl_fft=1.3.0=py38h42c9631_2\r\n - mkl_random=1.2.1=py38ha9443f7_2\r\n - ncurses=6.2=he6710b0_1\r\n - nettle=3.7.3=hbbd107a_1\r\n - ninja=1.10.2=hff7bd54_1\r\n - numpy=1.20.2=py38h2d18471_0\r\n - numpy-base=1.20.2=py38hfae3a4d_0\r\n - olefile=0.46=py_0\r\n - openh264=2.1.0=hd408876_0\r\n - openssl=1.1.1k=h27cfd23_0\r\n - pillow=8.2.0=py38he98fc37_0\r\n - pip=21.1.2=py38h06a4308_0\r\n - pycparser=2.20=py_2\r\n - pyopenssl=20.0.1=pyhd3eb1b0_1\r\n - pysocks=1.7.1=py38h06a4308_0\r\n - python=3.8.10=h12debd9_8\r\n - pytorch=1.8.1=py3.8_cuda11.1_cudnn8.0.5_0\r\n - readline=8.1=h27cfd23_0\r\n - requests=2.25.1=pyhd3eb1b0_0\r\n - setuptools=52.0.0=py38h06a4308_0\r\n - six=1.16.0=pyhd3eb1b0_0\r\n - sqlite=3.35.4=hdfb4753_0\r\n - tk=8.6.10=hbc83047_0\r\n - torchtext=0.9.1=py38\r\n - torchvision=0.9.1=py38_cu111\r\n - typing_extensions=3.7.4.3=pyha847dfd_0\r\n - urllib3=1.26.4=pyhd3eb1b0_0\r\n - wheel=0.36.2=pyhd3eb1b0_0\r\n - x264=1!157.20191217=h7b6447c_0\r\n - xz=5.2.5=h7b6447c_0\r\n - zlib=1.2.11=h7b6447c_3\r\n - zstd=1.4.9=haebb681_0\r\n - pip:\r\n - click==8.0.1\r\n - cycler==0.10.0\r\n - datasets==1.8.0\r\n - dill==0.3.4\r\n - filelock==3.0.12\r\n - fsspec==2021.6.0\r\n - huggingface-hub==0.0.8\r\n - joblib==1.0.1\r\n - jsonpickle==2.0.0\r\n - kiwisolver==1.3.1\r\n - matplotlib==3.4.2\r\n - multiprocess==0.70.12.2\r\n - packaging==20.9\r\n - pandas==1.2.4\r\n - pyarrow==3.0.0\r\n - pyparsing==2.4.7\r\n - python-dateutil==2.8.1\r\n - pytz==2021.1\r\n - regex==2021.4.4\r\n - sacremoses==0.0.45\r\n - tokenizers==0.10.3\r\n - tqdm==4.49.0\r\n - transformers==4.6.1\r\n - xxhash==2.0.2\r\nprefix: \/home\/mmajurski\/anaconda3\/envs\/round8\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2585\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2585\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2584","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2584\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2584\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2584\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2584","id":936049736,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyODY2Njc1","number":2584,"title":"wi_locness: reference latest leaderboard on 
codalab","user":{"login":"aseifert","id":4944799,"node_id":"MDQ6VXNlcjQ5NDQ3OTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4944799?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aseifert","html_url":"https:\/\/github.com\/aseifert","followers_url":"https:\/\/api.github.com\/users\/aseifert\/followers","following_url":"https:\/\/api.github.com\/users\/aseifert\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aseifert\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aseifert\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aseifert\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aseifert\/orgs","repos_url":"https:\/\/api.github.com\/users\/aseifert\/repos","events_url":"https:\/\/api.github.com\/users\/aseifert\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aseifert\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-02T20:26:22Z","updated_at":"2021-07-05T09:06:14Z","closed_at":"2021-07-05T09:06:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2584","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2584","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2584.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2584.patch","merged_at":"2021-07-05T09:06:14Z"},"body":"The dataset's author asked me to put this codalab link into the dataset's README.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2584\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2584\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2583","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2583\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2583\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2583\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2583","id":936034976,"node_id":"MDU6SXNzdWU5MzYwMzQ5NzY=","number":2583,"title":"Error iteration over IterableDataset using Torch 
DataLoader","user":{"login":"LeenaShekhar","id":12227436,"node_id":"MDQ6VXNlcjEyMjI3NDM2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12227436?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LeenaShekhar","html_url":"https:\/\/github.com\/LeenaShekhar","followers_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/followers","following_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/orgs","repos_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/repos","events_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-02T19:55:58Z","updated_at":"2021-07-20T09:04:45Z","closed_at":"2021-07-05T23:48:23Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI have an IterableDataset (created using streaming=True) and I am trying to create batches using Torch DataLoader class by passing this IterableDataset to it. This throws error which is pasted below. I can do the same by using Torch IterableDataset. One thing I noticed is that in the former case when I look at the dataloader.sampler class I get torch.utils.data.sampler.SequentialSampler while the latter one gives torch.utils.data.dataloader._InfiniteConstantSampler. \r\n\r\nI am not sure if this is how it is meant to be used, but that's what seemed reasonable to me. \r\n\r\n## Steps to reproduce the bug\r\n\r\n1. Does not work.\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset('oscar', \"unshuffled_deduplicated_en\", split='train', streaming=True)\r\n>>> dataloader = torch.utils.data.DataLoader(dataset, batch_size=4)\r\n>>> dataloader.sampler\r\n\r\n>>> for batch in dataloader:\r\n... print(batch)\r\n```\r\n\r\n2. Works.\r\n```python\r\nimport torch\r\nfrom torch.utils.data import Dataset, IterableDataset, DataLoader\r\nclass CustomIterableDataset(IterableDataset):\r\n 'Characterizes a dataset for PyTorch'\r\n def __init__(self, data):\r\n 'Initialization'\r\n self.data = data\r\n\r\n\r\n def __iter__(self):\r\n return iter(self.data)\r\n\r\n\r\ndata = list(range(12))\r\ndataset = CustomIterableDataset(data)\r\ndataloader = DataLoader(dataset, batch_size=4)\r\nprint(\"dataloader: \", dataloader.sampler)\r\nfor batch in dataloader:\r\n print(batch)\r\n```\r\n\r\n## Expected results\r\nTo get batches of data with the batch size as 4. 
Output from the latter one (2) though Datasource is different here so actual data is different.\r\ndataloader: \r\ntensor([0, 1, 2, 3])\r\ntensor([4, 5, 6, 7])\r\ntensor([ 8, 9, 10, 11])\r\n\r\n## Actual results\r\n\r\n\r\n...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/data\/leshekha\/lib\/HFDatasets\/lib\/python3.6\/site-packages\/torch\/utils\/data\/dataloader.py\", line 435, in __next__\r\n data = self._next_data()\r\n File \"\/data\/leshekha\/lib\/HFDatasets\/lib\/python3.6\/site-packages\/torch\/utils\/data\/dataloader.py\", line 474, in _next_data\r\n index = self._next_index() # may raise StopIteration\r\n File \"\/data\/leshekha\/lib\/HFDatasets\/lib\/python3.6\/site-packages\/torch\/utils\/data\/dataloader.py\", line 427, in _next_index\r\n return next(self._sampler_iter) # may raise StopIteration\r\n File \"\/data\/leshekha\/lib\/HFDatasets\/lib\/python3.6\/site-packages\/torch\/utils\/data\/sampler.py\", line 227, in __iter__\r\n for idx in self.sampler:\r\n File \"\/data\/leshekha\/lib\/HFDatasets\/lib\/python3.6\/site-packages\/torch\/utils\/data\/sampler.py\", line 67, in __iter__\r\n return iter(range(len(self.data_source)))\r\nTypeError: object of type 'IterableDataset' has no len()\r\n\r\n## Environment info\r\n\r\n- `datasets` version: '1.8.1.dev0'\r\n- Platform: Linux\r\n- Python version: Python 3.6.8\r\n- PyArrow version: '3.0.0'\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2583\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2583\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2582","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2582\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2582\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2582\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2582","id":935859104,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyNzAzNzg3","number":2582,"title":"Add skip and 
take","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-02T15:10:19Z","updated_at":"2021-07-05T16:06:40Z","closed_at":"2021-07-05T16:06:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2582","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2582","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2582.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2582.patch","merged_at":"2021-07-05T16:06:39Z"},"body":"As discussed in https:\/\/github.com\/huggingface\/datasets\/pull\/2375#discussion_r657084544 I added the `IterableDataset.skip` and `IterableDataset.take` methods that allows to do basic splitting of iterable datasets.\r\n\r\nYou can create new dataset with the first `n` examples using `IterableDataset.take()`, or you can get a dataset with the rest of the examples by skipping the first `n` examples with `IterableDataset.skip()`\r\n\r\nOne implementation detail:\r\n\r\nUsing `take` (or `skip`) prevents future dataset shuffling from shuffling the dataset shards, otherwise the taken examples could come from other shards. In this case it only uses the shuffle buffer.\r\nI would have loved to allow the shards of the taken examples to be shuffled anyway, but since we don't know in advance the length of each shard we don't know what shards to take or skip.\r\nI think this is ok though since users can shuffle before doing take or skip. 
I mentioned this in the documentation\r\n\r\ncc @vblagoje @lewtun ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2582\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":2,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2582\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2581","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2581\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2581\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2581\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2581","id":935783588,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyNjQwMDY4","number":2581,"title":"Faster search_batch for ElasticsearchIndex due to threading","user":{"login":"mwrzalik","id":1376337,"node_id":"MDQ6VXNlcjEzNzYzMzc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1376337?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mwrzalik","html_url":"https:\/\/github.com\/mwrzalik","followers_url":"https:\/\/api.github.com\/users\/mwrzalik\/followers","following_url":"https:\/\/api.github.com\/users\/mwrzalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mwrzalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mwrzalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mwrzalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mwrzalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/mwrzalik\/repos","events_url":"https:\/\/api.github.com\/users\/mwrzalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mwrzalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-02T13:42:07Z","updated_at":"2021-07-12T14:13:46Z","closed_at":"2021-07-12T09:52:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2581","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2581","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2581.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2581.patch","merged_at":"2021-07-12T09:52:51Z"},"body":"Hey, \r\nI think it makes sense to perform search_batch threaded, so ES can perform search in parallel.\r\nCheers!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2581\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2581\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2580","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2580\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2580\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2580\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2580","id":935767421,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyNjI2MTkz","number":2580,"title":"Fix Counter 
import","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-02T13:21:48Z","updated_at":"2021-07-02T14:37:47Z","closed_at":"2021-07-02T14:37:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2580","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2580","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2580.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2580.patch","merged_at":"2021-07-02T14:37:46Z"},"body":"Import from `collections` instead of `typing`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2580\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2580\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2579","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2579\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2579\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2579\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2579","id":935486894,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyMzkyNjYx","number":2579,"title":"Fix BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-02T07:10:40Z","updated_at":"2021-07-02T07:33:44Z","closed_at":"2021-07-02T07:33:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2579","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2579","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2579.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2579.patch","merged_at":"2021-07-02T07:33:44Z"},"body":"Add missing contributor to BibTeX entry.\r\n\r\ncc: @abhishekkrthakur @thomwolf ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2579\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2579\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2578","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2578\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2578\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2578\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2578","id":935187497,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyMTQ0OTY2","number":2578,"title":"Support Zstandard compressed 
files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-07-01T20:22:34Z","updated_at":"2021-08-11T14:46:24Z","closed_at":"2021-07-05T10:50:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2578","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2578","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2578.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2578.patch","merged_at":"2021-07-05T10:50:27Z"},"body":"Close #2572.\r\n\r\ncc: @thomwolf ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2578\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2578\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2576","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2576\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2576\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2576\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2576","id":934986761,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgxOTc5MTA1","number":2576,"title":"Add 
mC4","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-01T15:51:25Z","updated_at":"2021-07-02T14:50:56Z","closed_at":"2021-07-02T14:50:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2576","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2576","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2576.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2576.patch","merged_at":"2021-07-02T14:50:55Z"},"body":"AllenAI is now hosting the processed C4 and mC4 dataset in this repo: https:\/\/huggingface.co\/datasets\/allenai\/c4\r\nThanks a lot to them !\r\n\r\nIn this PR I added the mC4 dataset builder. 
It supports 108 languages\r\n\r\nYou can load it with\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nen_mc4 = load_dataset(\"mc4\", \"en\")\r\nfr_mc4 = load_dataset(\"mc4\", \"fr\")\r\nen_and_fr_mc4 = load_dataset(\"mc4\", languages=[\"en\", \"fr\"])\r\n```\r\n\r\nIt also supports streaming, if you don't want to download hundreds of GB of data:\r\n```python\r\nen_mc4 = load_dataset(\"mc4\", \"en\", streaming=True)\r\n```\r\n\r\nRegarding the dataset_infos.json, I will add them once I have them.\r\n\r\nAlso we can work on the dataset card at that will be at https:\/\/huggingface.co\/datasets\/mc4\r\nFor now I just added a link to https:\/\/huggingface.co\/datasets\/allenai\/c4 as well as a few sections","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2576\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2576\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2575","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2575\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2575\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2575\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2575","id":934876496,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgxODg0OTgy","number":2575,"title":"Add C4","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-01T13:58:08Z","updated_at":"2021-07-02T14:50:23Z","closed_at":"2021-07-02T14:50:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2575","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2575","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2575.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2575.patch","merged_at":"2021-07-02T14:50:23Z"},"body":"The old code for the C4 dataset was to generate the C4 with Apache Beam, as in Tensorflow Datasets.\r\nHowever AllenAI is now hosting the processed C4 dataset in this repo: https:\/\/huggingface.co\/datasets\/allenai\/c4\r\nThanks a lot to them for their amazing work !\r\n\r\nIn this PR I changed the 
script to download and prepare the data directly from this repo.\r\nIt has 4 variants: en, en.noblocklist, en.noclean, realnewslike\r\n\r\nYou can load it with\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nc4 = load_dataset(\"c4\", \"en\")\r\n```\r\n\r\nIt also supports streaming, if you don't want to download hundreds of GB of data:\r\n```python\r\nc4 = load_dataset(\"c4\", \"en\", streaming=True)\r\n```\r\n\r\nRegarding the dataset_infos.json, I haven't added the infos for en.noclean. I will add them once I have them.\r\n\r\nAlso we can work on the dataset card at https:\/\/huggingface.co\/datasets\/c4\r\nFor now I just added a link to https:\/\/huggingface.co\/datasets\/allenai\/c4 as well as a few sections","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2575\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2575\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2574","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2574\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2574\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2574\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2574","id":934632378,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgxNjczMzYy","number":2574,"title":"Add streaming in load a dataset docs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-01T09:32:53Z","updated_at":"2021-07-01T14:12:22Z","closed_at":"2021-07-01T14:12:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2574","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2574","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2574.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2574.patch","merged_at":"2021-07-01T14:12:21Z"},"body":"Mention dataset streaming on the \"loading a dataset\" page of the 
documentation","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2574\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2574\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2573","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2573\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2573\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2573\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2573","id":934584745,"node_id":"MDU6SXNzdWU5MzQ1ODQ3NDU=","number":2573,"title":"Finding right block-size with JSON loading difficult for user","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-01T08:48:35Z","updated_at":"2021-07-01T19:10:53Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As reported by @thomwolf, while loading a JSON Lines file with \"json\" loading script, he gets\r\n> json.decoder.JSONDecodeError: Extra data: line 2 column 1 (char 383)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2573\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2573\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2572","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2572\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2572\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2572\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2572","id":934573767,"node_id":"MDU6SXNzdWU5MzQ1NzM3Njc=","number":2572,"title":"Support Zstandard compressed files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-07-01T08:37:04Z","updated_at":"2021-07-05T10:50:27Z","closed_at":"2021-07-05T10:50:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Add support for Zstandard compressed files: https:\/\/facebook.github.io\/zstd\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2572\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2572\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2571","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2571\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2571\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2571\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2571","id":933791018,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgwOTQ2NzQ1","number":2571,"title":"Filter expected warning log from 
transformers","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-30T14:48:19Z","updated_at":"2021-07-02T04:08:17Z","closed_at":"2021-07-02T04:08:17Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2571","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2571","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2571.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2571.patch","merged_at":"2021-07-02T04:08:16Z"},"body":"Close #2569.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2571\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2571\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2570","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2570\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2570\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2570\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2570","id":933402521,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgwNjEzNzc0","number":2570,"title":"Minor fix docs format for 
bertscore","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-30T07:42:12Z","updated_at":"2021-06-30T15:31:01Z","closed_at":"2021-06-30T15:31:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2570","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2570","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2570.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2570.patch","merged_at":"2021-06-30T15:31:01Z"},"body":"Minor fix docs format for bertscore:\r\n- link to README\r\n- format of KWARGS_DESCRIPTION","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2570\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2570\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2569","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2569\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2569\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2569\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2569","id":933015797,"node_id":"MDU6SXNzdWU5MzMwMTU3OTc=","number":2569,"title":"Weights of model checkpoint not initialized for RobertaModel for 
Bertscore","user":{"login":"suzyahyah","id":2980993,"node_id":"MDQ6VXNlcjI5ODA5OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2980993?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/suzyahyah","html_url":"https:\/\/github.com\/suzyahyah","followers_url":"https:\/\/api.github.com\/users\/suzyahyah\/followers","following_url":"https:\/\/api.github.com\/users\/suzyahyah\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/suzyahyah\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/suzyahyah\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/suzyahyah\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/suzyahyah\/orgs","repos_url":"https:\/\/api.github.com\/users\/suzyahyah\/repos","events_url":"https:\/\/api.github.com\/users\/suzyahyah\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/suzyahyah\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-06-29T18:55:23Z","updated_at":"2021-07-01T07:08:59Z","closed_at":"2021-06-30T07:35:49Z","author_association":"NONE","active
_lock_reason":null,"draft":null,"pull_request":null,"body":"When applying bertscore out of the box, \r\n\r\n```Some weights of the model checkpoint at roberta-large were not used when initializing RobertaModel: ['lm_head.decoder.weight', 'lm_head.bias', 'lm_head.dense.bias', 'lm_head.layer_norm.bias', 'lm_head.dense.weight', 'lm_head.layer_norm.weight']```\r\n\r\nFollowing the typical usage from https:\/\/huggingface.co\/docs\/datasets\/loading_metrics.html\r\n\r\n```\r\nfrom datasets import load_metric\r\nmetric = load_metric('bertscore')\r\n\r\n# Example of typical usage\r\nfor batch in dataset:\r\n inputs, references = batch\r\n predictions = model(inputs)\r\n metric.add_batch(predictions=predictions, references=references)\r\nscore = metric.compute(lang=\"en\")\r\n#score = metric.compute(model_type=\"roberta-large\") # gives the same error\r\n```\r\n\r\nI am concerned about this because my usage shouldn't require any further fine-tuning and most people would expect to use BertScore out of the box? I realised the huggingface code is a wrapper around https:\/\/github.com\/Tiiiger\/bert_score, but I think this repo is anyway relying on the model code and weights from huggingface repo.... \r\n\r\n## Environment info\r\n- `datasets` version: 1.7.0\r\n- Platform: Linux-5.4.0-1041-aws-x86_64-with-glibc2.27\r\n- Python version: 3.9.5\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2569\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2569\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2568","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2568\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2568\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2568\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2568","id":932934795,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgwMjE5MDU2","number":2568,"title":"Add interleave_datasets for map-style 
datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-29T17:19:24Z","updated_at":"2021-07-01T09:33:34Z","closed_at":"2021-07-01T09:33:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2568","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2568","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2568.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2568.patch","merged_at":"2021-07-01T09:33:32Z"},"body":"### Add interleave_datasets for map-style datasets\r\n\r\nAdd support for map-style datasets (i.e. `Dataset` objects) in `interleave_datasets`.\r\nIt was only supporting iterable datasets (i.e. 
`IterableDataset` objects).\r\n\r\n### Implementation details\r\n\r\nIt works by concatenating the datasets and then re-order the indices to make the new dataset.\r\n\r\n### TODO\r\n- [x] tests\r\n- [x] docs\r\n\r\nClose #2563 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2568\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2568\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2567","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2567\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2567\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2567\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2567","id":932933536,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgwMjE3OTY3","number":2567,"title":"Add ASR task and new languages to resources","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-29T17:18:01Z","updated_at":"2021-07-01T09:42:23Z","closed_at":"2021-07-01T09:42:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2567","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2567","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2567.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2567.patch","merged_at":"2021-07-01T09:42:09Z"},"body":"This PR adds a new `automatic-speech-recognition` task to the list of supported tasks in `tasks.json` and also includes a few new languages missing from `common_voice`.\r\n\r\nNote: I used the [Papers with Code list](https:\/\/www.paperswithcode.com\/area\/speech\/speech-recognition) as inspiration for the ASR subtasks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2567\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2567\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2566","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2566\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2566\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2566\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2566","id":932804725,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgwMTA2NzM0","number":2566,"title":"fix Dataset.map when num_procs > num rows","user":{"login":"connor-mccarthy","id":55268212,"node_id":"MDQ6VXNlcjU1MjY4MjEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55268212?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/connor-mccarthy","html_url":"https:\/\/github.com\/connor-mccarthy","followers_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/followers","following_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/orgs","repos_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/repos","events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-29T15:07:07Z","updated_at":"2021-07-01T09:11:13Z","closed_at":"2021-07-01T09:11:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2566","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2566","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2566.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2566.patch","merged_at":"2021-07-01T09:11:13Z"},"body":"closes #2470\r\n\r\n## Testing notes\r\nTo run updated tests:\r\n```sh\r\npytest tests\/test_arrow_dataset.py -k \"BaseDatasetTest and test_map_multiprocessing\" -s\r\n```\r\nWith Python code (to view warning):\r\n```python\r\nfrom datasets import Dataset\r\n\r\n\r\ndataset = Dataset.from_dict({\"x\": [\"sample\"]})\r\nprint(len(dataset))\r\ndataset.map(lambda x: x, num_proc=10)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2566\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2566\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2565","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2565\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2565\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2565\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2565","id":932445439,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5Nzg3NTI4","number":2565,"title":"Inject templates for ASR datasets","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-29T10:02:01Z","updated_at":"2021-07-05T14:26:26Z","closed_at":"2021-07-05T14:26:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2565","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2565","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2565.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2565.patch","merged_at":"2021-07-05T14:26:26Z"},"body":"This PR adds ASR templates for 5 of the most common speech datasets on the Hub, where \"common\" is defined by the number of models trained on them.\r\n\r\nI also fixed a bunch of the tags in the READMEs \ud83d\ude0e ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2565\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2565\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2564","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2564\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2564\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2564\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2564","id":932389639,"node_id":"MDU6SXNzdWU5MzIzODk2Mzk=","number":2564,"title":"concatenate_datasets for iterable 
datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-29T08:59:41Z","updated_at":"2021-06-29T08:59:41Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently `concatenate_datasets` only works for map-style `Dataset`.\r\n\r\nIt would be nice to have it work for `IterableDataset` objects as well.\r\n\r\nIt would simply chain the iterables of the iterable 
datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2564\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2564\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2563","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2563\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2563\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2563\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2563","id":932387639,"node_id":"MDU6SXNzdWU5MzIzODc2Mzk=","number":2563,"title":"interleave_datasets for map-style datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.c
om\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-29T08:57:24Z","updated_at":"2021-07-01T09:33:33Z","closed_at":"2021-07-01T09:33:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently the `interleave_datasets` functions only works for `IterableDataset`.\r\nLet's make it work for map-style `Dataset` objects as well.\r\n\r\nIt would work the same way: either alternate between the datasets in order or randomly given probabilities specified by the user.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2563\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2563\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2562","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2562\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2562\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2562\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2562","id":932333436,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5NjkyMjQ2","number":2562,"title":"Minor fix in loading metrics docs","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-29T07:55:11Z","updated_at":"2021-06-29T17:21:22Z","closed_at":"2021-06-29T17:21:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2562","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2562","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2562.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2562.patch","merged_at":"2021-06-29T17:21:22Z"},"body":"Make some minor fixes in \"Loading 
metrics\" docs.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2562\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2562\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2561","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2561\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2561\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2561\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2561","id":932321725,"node_id":"MDU6SXNzdWU5MzIzMjE3MjU=","number":2561,"title":"Existing cache for local dataset builder file updates is ignored with `ignore_verifications=True`","user":{"login":"apsdehal","id":3616806,"node_id":"MDQ6VXNlcjM2MTY4MDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3616806?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/apsdehal","html_url":"https:\/\/github.com\/apsdehal","followers_url":"https:\/\/api.github.com\/users\/apsdehal\/followers","following_url":"https:\/\/api.github.com\/users\/apsdehal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/apsdehal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/apsdehal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/apsdehal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/apsdehal\/orgs","repos_url":"https:\/\/api.github.com\/users\/apsdehal\/repos","events_url":"https:\/\/api.github.com\/users\/apsdehal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/apsdehal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-29T07:43:03Z","updated_at":"2021-06-30T12:55:24Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIf i have local file defining a dataset builder class and I load it using `load_dataset` functionality, the existing cache is ignored whenever the file is update even with `ignore_verifications=True`. 
This slows down debugging and cache generator for very large datasets.\r\n\r\n## Steps to reproduce the bug\r\n\r\n- Create a local dataset builder class\r\n- load the local builder class file using `load_dataset` and let the cache build\r\n- update the file's content\r\n- The cache should rebuilt.\r\n\r\n## Expected results\r\n\r\nWith `ignore_verifications=True`, `load_dataset` should pick up existing cache.\r\n\r\n## Actual results\r\n\r\nCreates new cache.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-5.4.0-52-generic-x86_64-with-debian-bullseye-sid\r\n- Python version: 3.7.7\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2561\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2561\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2560","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2560\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2560\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2560\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2560","id":932143634,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5NTMyODk4","number":2560,"title":"fix Dataset.map when num_procs > num rows","user":{"login":"connor-mccarthy","id":55268212,"node_id":"MDQ6VXNlcjU1MjY4MjEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55268212?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/connor-mccarthy","html_url":"https:\/\/github.com\/connor-mccarthy","followers_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/followers","following_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/orgs","repos_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/repos","events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-06-29T02:24:11Z","updated_at":"2021-06-29T15:00:18Z","closed_at":"2021-06-29T14:53:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2560","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2560","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2560.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2560.patch","merged_at":null},"body":"closes #2470\r\n\r\n## Testing notes\r\nTo run updated tests:\r\n```sh\r\npytest tests\/test_arrow_dataset.py -k \"BaseDatasetTest and test_map_multiprocessing\" -s\r\n```\r\nWith Python code (to view warning):\r\n```python\r\nfrom datasets import 
Dataset\r\n\r\n\r\ndataset = Dataset.from_dict({\"x\": [\"sample\"]})\r\nprint(len(dataset))\r\ndataset.map(lambda x: x, num_proc=10)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2560\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2560\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2559","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2559\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2559\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2559\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2559","id":931849724,"node_id":"MDU6SXNzdWU5MzE4NDk3MjQ=","number":2559,"title":"Memory usage consistently increases when processing a dataset with `.map`","user":{"login":"apsdehal","id":3616806,"node_id":"MDQ6VXNlcjM2MTY4MDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3616806?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/apsdehal","html_url":"https:\/\/github.com\/apsdehal","followers_url":"https:\/\/api.github.com\/users\/apsdehal\/followers","following_url":"https:\/\/api.github.com\/users\/apsdehal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/apsdehal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/apsdehal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/apsdehal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/apsdehal\/orgs","repos_url":"https:\/\/api.github.com\/users\/apsdehal\/repos","events_url":"https:\/\/api.github.com\/users\/apsdehal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/apsdehal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-28T18:31:58Z","updated_at":"2021-06-29T08:43:00Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nI have a HF dataset with image paths stored in it and I am trying to load those image paths using `.map` with `num_proc=80`. I am noticing that the memory usage consistently keeps on increasing with time. I tried using `DEFAULT_WRITER_BATCH_SIZE=10` in the builder to decrease arrow writer's batch size but that doesn't seem to help.\r\n\r\n## Steps to reproduce the bug\r\n\r\nProviding code as it is would be hard. 
I can provide a MVP if that helps.\r\n\r\n## Expected results\r\n\r\nMemory usage should become consistent after some time following the launch of processing.\r\n\r\n## Actual results\r\n\r\nMemory usage keeps on increasing.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-5.4.0-52-generic-x86_64-with-debian-bullseye-sid\r\n- Python version: 3.7.7\r\n- PyArrow version: 3.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2559\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2559\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2558","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2558\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2558\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2558\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2558","id":931736647,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5MTg0Njk1","number":2558,"title":"Update: WebNLG - update checksums","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-28T16:16:37Z","updated_at":"2021-06-28T17:23:17Z","closed_at":"2021-06-28T17:23:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2558","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2558","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2558.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2558.patch","merged_at":"2021-06-28T17:23:16Z"},"body":"The master branch changed so I computed the new checksums.\r\n\r\nI also pinned a specific revision so that it doesn't happen again in the future.\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2553","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2558\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2558\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2557","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2557\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2557\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2557\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2557","id":931633823,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5MDk4ODg3","number":2557,"title":"Fix `fever` keys","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-28T14:27:02Z","updated_at":"2021-06-28T16:11:30Z","closed_at":"2021-06-28T16:11:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2557","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2557","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2557.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2557.patch","merged_at":"2021-06-28T16:11:29Z"},"body":"The keys has duplicates since they were reset to 0 after each file.\r\n\r\nI fixed it by taking into account the file index as well.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2557\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2557\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2556","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2556\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2556\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2556\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2556","id":931595872,"node_id":"MDU6SXNzdWU5MzE1OTU4NzI=","number":2556,"title":"Better DuplicateKeysError error to help the user debug the 
issue","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-28T13:50:57Z","updated_at":"2021-06-28T13:50:57Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As mentioned in https:\/\/github.com\/huggingface\/datasets\/issues\/2552 it would be nice to improve the error message when a dataset fails to build because there are duplicate example keys.\r\n\r\nThe current one is\r\n```python\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 48\r\nKeys should be unique and deterministic in nature\r\n```\r\n\r\nand we could have something that guides the user to debugging the issue:\r\n```python\r\nDuplicateKeysError: both 42th and 1337th examples have the same keys `48`.\r\nPlease fix the dataset script at \r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2556\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2556\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2555","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2555\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2555\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2555\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2555","id":931585485,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5MDU4ODM3","number":2555,"title":"Fix code_search_net 
keys","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-28T13:40:23Z","updated_at":"2021-09-02T08:24:43Z","closed_at":"2021-06-28T14:10:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2555","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2555","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2555.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2555.patch","merged_at":"2021-06-28T14:10:35Z"},"body":"There were duplicate keys in the `code_search_net` dataset, as reported in https:\/\/github.com\/huggingface\/datasets\/issues\/2552\r\n\r\nI fixed the keys (it was an addition of the file and row indices, which was causing collisions)\r\n\r\nFix #2552.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2555\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2555\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2554","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2554\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2554\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2554\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2554","id":931453855,"node_id":"MDU6SXNzdWU5MzE0NTM4NTU=","number":2554,"title":"Multilabel metrics not 
supported","user":{"login":"GuillemGSubies","id":37592763,"node_id":"MDQ6VXNlcjM3NTkyNzYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37592763?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/GuillemGSubies","html_url":"https:\/\/github.com\/GuillemGSubies","followers_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/followers","following_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/orgs","repos_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/repos","events_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-28T11:09:46Z","updated_at":"2021-10-13T12:29:13Z","closed_at":"2021-07-08T08:40:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I try to use a metric like F1 macro I get the following error:\r\n\r\n```\r\nTypeError: int() argument must be a string, a bytes-like object or a number, not 'list'\r\n```\r\nThere is an explicit casting here:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/fc79f61cbbcfa0e8c68b28c0a8257f17e768a075\/src\/datasets\/features.py#L274\r\n\r\nAnd looks like this is because here\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/fc79f61cbbcfa0e8c68b28c0a8257f17e768a075\/metrics\/f1\/f1.py#L88\r\n\r\nthe features can only be integers, so we cannot use that F1 for multilabel. 
Instead, if I create the following F1 (ints replaced with sequence of ints), it will work:\r\n\r\n```python\r\nclass F1(datasets.Metric):\r\n def _info(self):\r\n return datasets.MetricInfo(\r\n description=_DESCRIPTION,\r\n citation=_CITATION,\r\n inputs_description=_KWARGS_DESCRIPTION,\r\n features=datasets.Features(\r\n {\r\n \"predictions\": datasets.Sequence(datasets.Value(\"int32\")),\r\n \"references\": datasets.Sequence(datasets.Value(\"int32\")),\r\n }\r\n ),\r\n reference_urls=[\"https:\/\/scikit-learn.org\/stable\/modules\/generated\/sklearn.metrics.f1_score.html\"],\r\n )\r\n\r\n def _compute(self, predictions, references, labels=None, pos_label=1, average=\"binary\", sample_weight=None):\r\n return {\r\n \"f1\": f1_score(\r\n references,\r\n predictions,\r\n labels=labels,\r\n pos_label=pos_label,\r\n average=average,\r\n sample_weight=sample_weight,\r\n ),\r\n }\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2554\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2554\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2553","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2553\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2553\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2553\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2553","id":931365926,"node_id":"MDU6SXNzdWU5MzEzNjU5MjY=","number":2553,"title":"load_dataset(\"web_nlg\") NonMatchingChecksumError","user":{"login":"alexandrethm","id":33730312,"node_id":"MDQ6VXNlcjMzNzMwMzEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33730312?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexandrethm","html_url":"https:\/\/github.com\/alexandrethm","followers_url":"https:\/\/api.github.com\/users\/alexandrethm\/followers","following_url":"https:\/\/api.github.com\/users\/alexandrethm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexandrethm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexandrethm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexandrethm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexandrethm\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexandrethm\/repos","events_url":"https:\/\/api.github.com\/users\/alexandrethm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexandrethm\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-06-28T09:26:46Z","updated_at":"2021-06-28T17:23:39Z","closed_at":"2021-06-28T17:23:16Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi! 
It seems the WebNLG dataset gives a NonMatchingChecksumError.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('web_nlg', name=\"release_v3.0_en\", split=\"dev\")\r\n```\r\n\r\nGives\r\n\r\n```\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/gitlab.com\/shimorina\/webnlg-dataset\/-\/archive\/master\/webnlg-dataset-master.zip']\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.0\r\n- Platform: macOS-11.3.1-x86_64-i386-64bit\r\n- Python version: 3.9.4\r\n- PyArrow version: 3.0.0\r\n\r\nAlso tested on Linux, with python 3.6.8","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2553\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2553\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2552","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2552\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2552\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2552\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2552","id":931354687,"node_id":"MDU6SXNzdWU5MzEzNTQ2ODc=","number":2552,"title":"Keys should be unique error on code_search_net","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-06-28T09:15:20Z","updated_at":"2021-09-06T14:08:30Z","closed_at":"2021-09-02T08:25:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nLoading `code_search_net` seems not possible at the moment.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n>>> load_dataset('code_search_net')\r\nDownloading: 8.50kB [00:00, 3.09MB\/s] \r\nDownloading: 19.1kB [00:00, 10.1MB\/s] \r\nNo config specified, defaulting to: code_search_net\/all\r\nDownloading and preparing dataset code_search_net\/all (download: 4.77 GiB, generated: 5.99 GiB, post-processed: Unknown 
size, total: 10.76 GiB) to \/Users\/thomwolf\/.cache\/huggingface\/datasets\/code_search_net\/all\/1.0.0\/b3e8278faf5d67da1d06981efbeac3b76a2900693bd2239bbca7a4a3b0d6e52a...\r\nTraceback (most recent call last): \r\n File \"\/Users\/thomwolf\/Documents\/GitHub\/datasets\/src\/datasets\/builder.py\", line 1067, in _prepare_split\r\n writer.write(example, key)\r\n File \"\/Users\/thomwolf\/Documents\/GitHub\/datasets\/src\/datasets\/arrow_writer.py\", line 343, in write\r\n self.check_duplicate_keys()\r\n File \"\/Users\/thomwolf\/Documents\/GitHub\/datasets\/src\/datasets\/arrow_writer.py\", line 354, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 48\r\nKeys should be unique and deterministic in nature\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.1.dev0\r\n- Platform: macOS-10.15.7-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: 2.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2552\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2552\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2551","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2551\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2551\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2551\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2551","id":930967978,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc4NTQzMjg1","number":2551,"title":"Fix FileSystems 
documentation","user":{"login":"connor-mccarthy","id":55268212,"node_id":"MDQ6VXNlcjU1MjY4MjEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55268212?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/connor-mccarthy","html_url":"https:\/\/github.com\/connor-mccarthy","followers_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/followers","following_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/orgs","repos_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/repos","events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-27T16:18:42Z","updated_at":"2021-06-28T13:09:55Z","closed_at":"2021-06-28T13:09:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2551","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2551","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2551.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2551.patch","merged_at":"2021-06-28T13:09:54Z"},"body":"### What this fixes:\r\nThis PR resolves several issues I discovered in the documentation on the `datasets.filesystems` module ([this page](https:\/\/huggingface.co\/docs\/datasets\/filesystems.html)).\r\n\r\n### What were the issues?\r\nWhen I originally tried implementing the code examples I faced several bugs attributed to:\r\n\r\n- out of date [botocore](https:\/\/github.com\/boto\/botocore) call signatures\r\n- capitalization errors in the `S3FileSystem` class name (written as `S3Filesystem` in one place)\r\n- call signature errors for the `S3FileSystem` class constructor (uses parameter `sessions` instead of `session` in some places) (see [`s3fs`](https:\/\/s3fs.readthedocs.io\/en\/latest\/api.html#s3fs.core.S3FileSystem) for where this constructor signature is defined)\r\n\r\n### Testing\/reviewing notes\r\nInstructions for generating the documentation locally: [here](https:\/\/github.com\/huggingface\/datasets\/tree\/master\/docs#generating-the-documentation).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2551\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2551\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2550","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2550\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2550\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2550\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2550","id":930951287,"node_id":"MDU6SXNzdWU5MzA5NTEyODc=","number":2550,"title":"Allow for incremental cumulative metric updates in a distributed setup","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-27T15:00:58Z","updated_at":"2021-09-26T13:42:39Z","closed_at":"2021-09-26T13:42:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, using a metric allows for one of the following:\r\n- Per example\/batch metrics\r\n- Cumulative metrics over the whole data\r\n\r\nWhat I'd like is to have an efficient way to get cumulative metrics over the examples\/batches added so far, in order to display it as part of the progress bar during training\/evaluation.\r\n\r\nSince most metrics are just an average of per-example metrics (which aren't?), an efficient calculation can be done as follows:\r\n`((score_cumulative * n_cumulative) + (score_new * n_new)) \/ (n_cumulative+ n_new)`\r\nwhere `n` and `score` refer to number of examples and metric score, `cumulative` refers to the cumulative metric and `new` refers to the addition of new examples.\r\n\r\nIf you don't want to add this capability in the library, a simple solution exists so users can do it themselves:\r\nIt is easy to implement for a single process setup, but in a distributed one there is no way to get the correct `n_new`.\r\nThe solution for this is to return the number of examples that was used to compute the metrics in `.compute()` by adding the following line here:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/5a3221785311d0ce86c2785b765e86bd6997d516\/src\/datasets\/metric.py#L402-L403\r\n```\r\noutput[\"number_of_examples\"] = len(predictions)\r\n```\r\nand also remove the log message here so it won't 
spam:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/3db67f5ff6cbf807b129d2b4d1107af27623b608\/src\/datasets\/metric.py#L411\r\n\r\nIf this change is ok with you, I'll open a pull request.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2550\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2550\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2549","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2549\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2549\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2549\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2549","id":929819093,"node_id":"MDU6SXNzdWU5Mjk4MTkwOTM=","number":2549,"title":"Handling unlabeled datasets","user":{"login":"nelson-liu","id":7272031,"node_id":"MDQ6VXNlcjcyNzIwMzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7272031?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nelson-liu","html_url":"https:\/\/github.com\/nelson-liu","followers_url":"https:\/\/api.github.com\/users\/nelson-liu\/followers","following_url":"https:\/\/api.github.com\/users\/nelson-liu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nelson-liu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nelson-liu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nelson-liu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nelson-liu\/orgs","repos_url":"https:\/\/api.github.com\/users\/nelson-liu\/repos","events_url":"https:\/\/api.github.com\/users\/nelson-liu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nelson-liu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-25T04:32:23Z","updated_at":"2021-06-25T21:07:57Z","closed_at":"2021-06-25T21:07:56Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi!\r\n\r\nIs there a way for datasets to produce unlabeled instances (e.g., the `ClassLabel` can be nullable).\r\n\r\nFor example, I want to use the MNLI dataset reader ( https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/multi_nli\/multi_nli.py ) on a file that doesn't have the `gold_label` field. 
I tried setting `\"label\": data.get(\"gold_label\")`, but got the following error:\r\n\r\n```\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 748, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 989, in _prepare_split\r\n example = self.info.features.encode_example(record)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 953, in encode_example\r\n return encode_nested_example(self, example)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 848, in encode_nested_example\r\n k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 848, in \r\n k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 875, in encode_nested_example\r\n return schema.encode_example(obj)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 653, in encode_example\r\n if not -1 <= example_data < self.num_classes:\r\nTypeError: '<=' not supported between instances of 'int' and 'NoneType'\r\n```\r\n\r\nWhat's the proper way to handle reading unlabeled datasets, especially for downstream usage with Transformers?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2549\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2549\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2548","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2548\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2548\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2548\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2548","id":929232831,"node_id":"MDU6SXNzdWU5MjkyMzI4MzE=","number":2548,"title":"Field order issue in loading 
json","user":{"login":"luyug","id":55288513,"node_id":"MDQ6VXNlcjU1Mjg4NTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55288513?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/luyug","html_url":"https:\/\/github.com\/luyug","followers_url":"https:\/\/api.github.com\/users\/luyug\/followers","following_url":"https:\/\/api.github.com\/users\/luyug\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/luyug\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/luyug\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/luyug\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/luyug\/orgs","repos_url":"https:\/\/api.github.com\/users\/luyug\/repos","events_url":"https:\/\/api.github.com\/users\/luyug\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/luyug\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-24T13:29:53Z","updated_at":"2021-06-24T14:36:43Z","closed_at":"2021-06-24T14:34:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe `load_dataset` function expects columns in alphabetical order when loading json files.\r\n\r\nSimilar bug was previously reported for csv in #623 and fixed in #684.\r\n## Steps to reproduce the bug\r\n\r\nFor a json file `j.json`,\r\n```\r\n{\"c\":321, \"a\": 1, \"b\": 2}\r\n```\r\nRunning the following,\r\n```\r\nf= datasets.Features({'a': Value('int32'), 'b': Value('int32'), 'c': Value('int32')})\r\njson_data = datasets.load_dataset('json', data_files='j.json', features=f)\r\n```\r\n\r\n\r\n## Expected results\r\nA successful load.\r\n## Actual results\r\n```\r\nFile \"pyarrow\/table.pxi\", line 1409, in pyarrow.lib.Table.cast\r\nValueError: Target schema's field names are not matching the table's field names: ['c', 'a', 'b'], ['a', 'b', 'c']\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-3.10.0-957.1.3.el7.x86_64-x86_64-with-glibc2.10\r\n- Python version: 3.8.8\r\n- PyArrow version: 3.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2548\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2548\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2547","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2547\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2547\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2547\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2547","id":929192329,"node_id":"MDU6SXNzdWU5MjkxOTIzMjk=","number":2547,"title":"Dataset load_from_disk is too 
slow","user":{"login":"alexvaca0","id":35173563,"node_id":"MDQ6VXNlcjM1MTczNTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35173563?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexvaca0","html_url":"https:\/\/github.com\/alexvaca0","followers_url":"https:\/\/api.github.com\/users\/alexvaca0\/followers","following_url":"https:\/\/api.github.com\/users\/alexvaca0\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexvaca0\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexvaca0\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexvaca0\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexvaca0\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexvaca0\/repos","events_url":"https:\/\/api.github.com\/users\/alexvaca0\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexvaca0\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-06-24T12:45:44Z","updated_at":"2021-06-25T14:56:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"@lhoestq \r\n## Describe the bug\r\nIt's not normal that I have to wait 7-8 hours for a dataset to be loaded from disk, as there are no preprocessing steps, it's only loading it with load_from_disk. I have 96 cpus, however only 1 is used for this, which is inefficient. Moreover, its usage is at 1%... This is happening in the context of a language model training, therefore I'm wasting 100$ each time I have to load the dataset from disk again (because the spot instance was stopped by aws and I need to relaunch it for example). \r\n\r\n## Steps to reproduce the bug\r\nJust get the oscar in spanish (around 150GGB) and try to first save in disk and then load the processed dataset. It's not dependent on the task you're doing, it just depends on the size of the text dataset.\r\n\r\n## Expected results\r\nI expect the dataset to be loaded in a normal time, by using the whole machine for loading it, I mean if you store the dataset in multiple files (.arrow) and then load it from multiple files, you can use multiprocessing for that and therefore don't waste so much time. \r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Ubuntu 18\r\n- Python version: 3.8\r\n\r\n\r\nI've seen you're planning to include a streaming mode for load_dataset, but that only saves the downloading and processing time, that's not being a problem for me, you cannot save the pure loading from disk time, therefore that's not a solution for my use case or for anyone who wants to use your library for training a language model. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2547\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2547\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2546","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2546\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2546\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2546\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2546","id":929091689,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2OTk2MjQ0","number":2546,"title":"Add license to the Cambridge English Write & Improve + LOCNESS dataset card","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-24T10:39:29Z","updated_at":"2021-06-24T10:52:01Z","closed_at":"2021-06-24T10:52:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2546","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2546","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2546.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2546.patch","merged_at":"2021-06-24T10:52:01Z"},"body":"As noticed in https:\/\/github.com\/huggingface\/datasets\/pull\/2539, the licensing information was missing for this dataset.\r\n\r\nI added it and I also filled a few other empty sections.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2546\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2546\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2545","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2545\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2545\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2545\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2545","id":929016580,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2OTMxOTYw","number":2545,"title":"Fix DuplicatedKeysError in drop dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-24T09:10:39Z","updated_at":"2021-06-24T14:57:08Z","closed_at":"2021-06-24T14:57:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2545","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2545","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2545.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2545.patch","merged_at":"2021-06-24T14:57:08Z"},"body":"Close #2542.\r\n\r\ncc: @VictorSanh.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2545\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2545\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2544","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2544\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2544\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2544\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2544","id":928900827,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2ODM1MjYz","number":2544,"title":"Fix logging 
levels","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-24T06:41:36Z","updated_at":"2021-06-25T13:40:19Z","closed_at":"2021-06-25T13:40:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2544","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2544","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2544.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2544.patch","merged_at":"2021-06-25T13:40:19Z"},"body":"Sometimes default `datasets` logging can be too verbose. One approach could be reducing some logging levels, from info to debug, or from warning to info.\r\n\r\nClose #2543.\r\n\r\ncc: @stas00 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2544\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2544\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2543","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2543\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2543\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2543\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2543","id":928571915,"node_id":"MDU6SXNzdWU5Mjg1NzE5MTU=","number":2543,"title":"switching some low-level log.info's to 
log.debug?","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-23T19:26:55Z","updated_at":"2021-06-25T13:40:19Z","closed_at":"2021-06-25T13:40:19Z","author_association":"MEMBER","active_lock_reason":nu
ll,"draft":null,"pull_request":null,"body":"In https:\/\/github.com\/huggingface\/transformers\/pull\/12276 we are now changing the examples to have `datasets` on the same log level as `transformers`, so that one setting can do a consistent logging across all involved components.\r\n\r\nThe trouble is that now we get a ton of these:\r\n\r\n```\r\n06\/23\/2021 12:15:31 - INFO - datasets.utils.filelock - Lock 139627640431136 acquired on \/home\/stas\/.cache\/huggingface\/metrics\/sacrebleu\/default\/default_experiment-1-0.arrow.lock\r\n06\/23\/2021 12:15:31 - INFO - datasets.arrow_writer - Done writing 50 examples in 12280 bytes \/home\/stas\/.cache\/huggingface\/metrics\/sacrebleu\/default\/default_experiment-1-0.arrow.\r\n06\/23\/2021 12:15:31 - INFO - datasets.arrow_dataset - Set __getitem__(key) output type to python objects for no columns (when key is int or slice) and don't output other (un-formatted) columns.\r\n06\/23\/2021 12:15:31 - INFO - datasets.utils.filelock - Lock 139627640431136 released on \/home\/stas\/.cache\/huggingface\/metrics\/sacrebleu\/default\/default_experiment-1-0.arrow.lock\r\n```\r\n\r\nMay I suggest that these can be `log.debug` as it's no informative to the user.\r\n\r\nMore examples: these are not informative - too much information:\r\n```\r\n06\/23\/2021 12:14:26 - INFO - datasets.load - Checking \/home\/stas\/.cache\/huggingface\/datasets\/downloads\/459933f1fe47711fad2f6ff8110014ff189120b45ad159ef5b8e90ea43a174fa.e23e7d1259a8c6274a82a42a8936dd1b87225302c6dc9b7261beb3bc2daac640.py for additional imports.\r\n06\/23\/2021 12:14:27 - INFO - datasets.builder - Constructing Dataset for split train, validation, test, from \/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/0d9fb3e814712c785176ad8cdb9f465fbe6479000ee6546725db30ad8a8b5f8a\r\n\r\n```\r\n\r\nWhile these are:\r\n```\r\n06\/23\/2021 12:14:27 - INFO - datasets.info - Loading Dataset Infos from \/home\/stas\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wmt16\/0d9fb3e814712c785176ad8cdb9f465fbe6479000ee6546725db30ad8a8b5f8a\r\n06\/23\/2021 12:14:27 - WARNING - datasets.builder - Reusing dataset wmt16 (\/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/0d9fb3e814712c785176ad8cdb9f465fbe6479000ee6546725db30ad8a8b5f8a)\r\n```\r\n\r\nI also realize that `transformers` examples don't have do use `info` for `datasets` to let the default `warning` keep logging to less noisy.\r\n\r\nBut I think currently the log levels are slightly misused and skewed by 1 level. Many `warnings` will better be `info`s and most `info`s be `debug`.\r\n\r\ne.g.:\r\n\r\n```\r\n06\/23\/2021 12:14:27 - WARNING - datasets.builder - Reusing dataset wmt16 (\/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/0d9fb3e814712c785176ad8cdb9f465fbe6479000ee6546725db30ad8a8b5f8a)\r\n```\r\n\r\nwhy is this a warning? it is informing me that the cache is used, there is nothing to be worried about. I'd have it as `info`.\r\n\r\nWarnings are typically something that's bordering error or the first thing to check when things don't work as expected.\r\n\r\ninfrequent info is there to inform of the different stages or important events.\r\n\r\nEverything else is debug.\r\n\r\nAt least the way I understand things. 
\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2543\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2543\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2542","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2542\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2542\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2542\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2542","id":928540382,"node_id":"MDU6SXNzdWU5Mjg1NDAzODI=","number":2542,"title":"`datasets.keyhash.DuplicatedKeysError` for `drop` and `adversarial_qa\/adversarialQA`","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-06-23T18:41:16Z","updated_at":"2021-06-25T21:50:05Z","closed_at":"2021-06-24T14:57:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nFailure to generate the datasets (`drop` and subset `adversarialQA` from `adversarial_qa`) because of duplicate keys.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset(\"drop\")\r\nload_dataset(\"adversarial_qa\", \"adversarialQA\")\r\n```\r\n\r\n## Expected results\r\nThe examples keys should be unique.\r\n\r\n## Actual results\r\n```bash\r\n>>> load_dataset(\"drop\")\r\nUsing custom data configuration default\r\nDownloading and preparing dataset drop\/default (download: 7.92 MiB, generated: 111.88 MiB, post-processed: Unknown size, total: 119.80 MiB) to \/home\/hf\/.cache\/huggingface\/datasets\/drop\/default\/0.1.0\/7a94f1e2bb26c4b5c75f89857c06982967d7416e5af935a9374b9bccf5068026...\r\nTraceback (most recent call last): \r\n File \"\", line 1, in \r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 751, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepare\r\n 
dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 992, in _prepare_split\r\n num_examples, num_bytes = writer.finalize()\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 409, in finalize\r\n self.check_duplicate_keys()\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 349, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 28553293-d719-441b-8f00-ce3dc6df5398\r\nKeys should be unique and deterministic in nature\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.7.0\r\n- Platform: Linux-5.4.0-1044-gcp-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2542\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2542\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2541","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2541\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2541\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2541\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2541","id":928529078,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2NTIwNDgx","number":2541,"title":"update discofuse link cc 
@ekQ","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-23T18:24:58Z","updated_at":"2021-06-28T14:34:51Z","closed_at":"2021-06-28T14:34:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2541","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2541","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2541.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2541.patch","merged_at":"2021-06-28T14:34:50Z"},"body":"Updating the discofuse link: https:\/\/github.com\/google-research-datasets\/discofuse\/commit\/fd4b120cb3dd19a417e7f3b5432010b574b5eeee","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2541\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2541\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2540","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2540\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2540\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2540\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2540","id":928433892,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2NDM5NTM1","number":2540,"title":"Remove task templates if required features are removed during 
`Dataset.map`","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-23T16:20:25Z","updated_at":"2021-06-24T14:41:15Z","closed_at":"2021-06-24T13:34:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2540","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2540","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2540.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2540.patch","merged_at":"2021-06-24T13:34:03Z"},"body":"This PR fixes a bug reported by @craffel where removing a dataset's columns during `Dataset.map` triggered a `KeyError` because the `TextClassification` template tried to access the removed columns during `DatasetInfo.__post_init__`:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n# `yelp_polarity` comes with a `TextClassification` template\r\nds = load_dataset(\"yelp_polarity\", split=\"test\")\r\nds\r\n# Dataset({\r\n# features: ['text', 'label'],\r\n# num_rows: 38000\r\n# })\r\n\r\n# Triggers KeyError: 'label' - oh noes!\r\nds.map(lambda x: {\"inputs\": 0}, remove_columns=ds.column_names)\r\n```\r\n\r\nI wrote a unit test to make sure I could reproduce the error and then patched a fix.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2540\/reactions","total_count":2,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2540\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2539","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2539\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2539\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2539\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2539","id":927952429,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2MDI5MDY5","number":2539,"title":"remove wi_locness dataset due to licensing 
issues","user":{"login":"aseifert","id":4944799,"node_id":"MDQ6VXNlcjQ5NDQ3OTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4944799?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aseifert","html_url":"https:\/\/github.com\/aseifert","followers_url":"https:\/\/api.github.com\/users\/aseifert\/followers","following_url":"https:\/\/api.github.com\/users\/aseifert\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aseifert\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aseifert\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aseifert\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aseifert\/orgs","repos_url":"https:\/\/api.github.com\/users\/aseifert\/repos","events_url":"https:\/\/api.github.com\/users\/aseifert\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aseifert\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-06-23T07:35:32Z","updated_at":"2021-06-25T14:52:42Z","closed_at":"2021-06-25T14:52:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2539","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2539","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2539.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2539.patch","merged_at":null},"body":"It was brought to my attention that this dataset's license is not only missing, but also prohibits redistribution. I contacted the original author to apologize for this oversight and asked if we could still use it, but unfortunately we can't and the author kindly asked to take down this dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2539\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2539\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2538","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2538\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2538\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2538\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2538","id":927940691,"node_id":"MDU6SXNzdWU5Mjc5NDA2OTE=","number":2538,"title":"Loading partial dataset when 
debugging","user":{"login":"reachtarunhere","id":9061913,"node_id":"MDQ6VXNlcjkwNjE5MTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9061913?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/reachtarunhere","html_url":"https:\/\/github.com\/reachtarunhere","followers_url":"https:\/\/api.github.com\/users\/reachtarunhere\/followers","following_url":"https:\/\/api.github.com\/users\/reachtarunhere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/reachtarunhere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/reachtarunhere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/reachtarunhere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/reachtarunhere\/orgs","repos_url":"https:\/\/api.github.com\/users\/reachtarunhere\/repos","events_url":"https:\/\/api.github.com\/users\/reachtarunhere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/reachtarunhere\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-06-23T07:19:52Z","updated_at":"2021-07-29T14:10:33Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am using PyTorch Lightning along with datasets (thanks for so many datasets already prepared and the great splits). \r\n\r\nEvery time I execute load_dataset for the imdb dataset it takes some time even if I specify a split involving very few samples. I guess this due to hashing as per the other issues.\r\n\r\nIs there a way to only load part of the dataset on load_dataset? This would really speed up my workflow.\r\nSomething like a debug mode would really help. 
Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2538\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2538\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2537","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2537\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2537\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2537\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2537","id":927472659,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1NjI1OTY3","number":2537,"title":"Add Parquet loader + from_parquet and to_parquet","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-06-22T17:28:23Z","updated_at":"2021-06-30T16:31:03Z","closed_at":"2021-06-30T16:30:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2537","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2537","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2537.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2537.patch","merged_at":"2021-06-30T16:30:58Z"},"body":"Continuation of #2247 \r\n\r\nI added a \"parquet\" dataset builder, as well as the methods `Dataset.from_parquet` and `Dataset.to_parquet`.\r\nAs usual, the data are converted to arrow in a batched way to avoid loading everything in memory.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2537\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2537\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2536","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2536\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2536\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2536\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2536","id":927338639,"node_id":"MDU6SXNzdWU5MjczMzg2Mzk=","number":2536,"title":"Use `Audio` features for `AutomaticSpeechRecognition` task template","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-22T15:07:21Z","updated_at":"2021-06-22T15:16:51Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In #2533 we added a task template for speech recognition that relies on the file paths to the audio files. As pointed out by @SBrandeis this is brittle as it doesn't port easily across different OS'. \r\n\r\nThe solution is to use dedicated `Audio` features when casting the dataset. 
These features are not yet available in `datasets`, but should be included in the `AutomaticSpeechRecognition` template once they are.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2536\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2536\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2535","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2535\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2535\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2535\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2535","id":927334349,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1NTA3MTAw","number":2535,"title":"Improve Features docs","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-22T15:03:27Z","updated_at":"2021-06-23T13:40:43Z","closed_at":"2021-06-23T13:40:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2535","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2535","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2535.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2535.patch","merged_at":"2021-06-23T13:40:43Z"},"body":"- Fix rendering and cross-references in Features docs\r\n- Add docstrings to Features methods","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2535\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2535\/timeline","performed_via_github_app":null} 
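
Issue 2536 above proposes dedicated `Audio` features because the current speech recognition template only carries file paths. Since that feature type does not exist in `datasets` yet, the sketch below only illustrates the manual decoding step such a feature would absorb; the `soundfile` dependency and the new column names are assumptions, not part of the proposal.

```python
import soundfile as sf  # assumed available for decoding; not part of the proposal itself
from datasets import load_dataset

ds = load_dataset("timit_asr", split="train[:10]")

def decode(example):
    # "file" is an absolute, OS-specific path -- the brittleness the issue points out
    array, sampling_rate = sf.read(example["file"])
    return {"speech": array, "sampling_rate": sampling_rate}

ds = ds.map(decode)  # a dedicated Audio feature would make this step unnecessary
```
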
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2534","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2534\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2534\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2534\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2534","id":927201435,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1MzkzODg0","number":2534,"title":"Sync with transformers disabling NOTSET","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-22T12:54:21Z","updated_at":"2021-06-24T14:42:47Z","closed_at":"2021-06-24T14:42:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2534","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2534","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2534.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2534.patch","merged_at":"2021-06-24T14:42:47Z"},"body":"Close #2528.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2534\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2534\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2533","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2533\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2533\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2533\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2533","id":927193264,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1Mzg2OTMw","number":2533,"title":"Add task template for automatic speech 
recognition","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-22T12:45:02Z","updated_at":"2021-06-23T16:14:46Z","closed_at":"2021-06-23T15:56:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2533","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2533","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2533.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2533.patch","merged_at":"2021-06-23T15:56:57Z"},"body":"This PR adds a task template for automatic speech recognition. In this task, the input is a path to an audio file which the model consumes to produce a transcription.\r\n\r\nUsage:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom datasets.tasks import AutomaticSpeechRecognition\r\n\r\nds = load_dataset(\"timit_asr\", split=\"train[:10]\")\r\n# Dataset({\r\n# features: ['file', 'text', 'phonetic_detail', 'word_detail', 'dialect_region', 'sentence_type', 'speaker_id', 'id'],\r\n# num_rows: 10\r\n# })\r\n\r\ntask = AutomaticSpeechRecognition(audio_file_column=\"file\", transcription_column=\"text\")\r\nds.prepare_for_task(task)\r\n# Dataset({\r\n# features: ['audio_file', 'transcription'],\r\n# num_rows: 10\r\n# })\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2533\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2533\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2532","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2532\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2532\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2532\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2532","id":927063196,"node_id":"MDU6SXNzdWU5MjcwNjMxOTY=","number":2532,"title":"Tokenizer's normalization preprocessor cause misalignment in return_offsets_mapping for tokenizer classification 
task","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-22T10:08:18Z","updated_at":"2021-06-23T05:17:25Z","closed_at":"2021-06-23T05:17:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"[This colab notebook](https:\/\/colab.research.google.com\/drive\/151gKyo0YIwnlznrOHst23oYH_a3mAe3Z?usp=sharing) implements a token classification input pipeline extending the logic from [this hugging example](https:\/\/huggingface.co\/transformers\/custom_datasets.html#tok-ner).\r\n\r\nThe pipeline works fine with most instance in different languages, but unfortunately, [the Japanese Kana ligature (a form of abbreviation? I don't know Japanese well)](https:\/\/en.wikipedia.org\/wiki\/Kana_ligature) break the alignment of `return_offsets_mapping`:\r\n![image](https:\/\/user-images.githubusercontent.com\/50871412\/122904371-db192700-d382-11eb-8917-1775db76db69.png)\r\n\r\nWithout the try catch block, it riase `ValueError: NumPy boolean array indexing assignment cannot assign 88 input values to the 87 output values where the mask is true`, example shown here [(another colab notebook)](https:\/\/colab.research.google.com\/drive\/1MmOqf3ppzzdKKyMWkn0bJy6DqzOO0SSm?usp=sharing)\r\n\r\nIt is clear that the normalizer is the process that break the alignment, as it is observed that `tokenizer._tokenizer.normalizer.normalize_str('\u30ff')` return '\u30b3\u30c8'.\r\n\r\nOne workaround is to include `tokenizer._tokenizer.normalizer.normalize_str` before the tokenizer preprocessing pipeline, which is also provided in the [first colab notebook](https:\/\/colab.research.google.com\/drive\/151gKyo0YIwnlznrOHst23oYH_a3mAe3Z?usp=sharing) with the name `udposTestDatasetWorkaround`.\r\n\r\nI guess similar logics should be included inside the tokenizer and the offsets_mapping generation process such that user don't need to include them in their code. 
But I don't understand the code of tokenizer well that I think I am not able to do this.\r\n\r\np.s.\r\n**I am using my own dataset building script in the provided example, but the script should be equivalent to the changes made by this [update](https:\/\/github.com\/huggingface\/datasets\/pull\/2466)**\r\n`get_dataset `is just a simple wrapping for `load_dataset`\r\nand the `tokenizer` is just `XLMRobertaTokenizerFast.from_pretrained(\"xlm-roberta-large\")`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2532\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2532\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2531","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2531\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2531\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2531\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2531","id":927017924,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1MjM2MDYz","number":2531,"title":"Fix dev version","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-22T09:17:10Z","updated_at":"2021-06-22T09:47:10Z","closed_at":"2021-06-22T09:47:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2531","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2531","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2531.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2531.patch","merged_at":"2021-06-22T09:47:09Z"},"body":"The dev version that ends in `.dev0` should be greater than the current version.\r\nHowever it happens that `1.8.0 > 1.8.0.dev0` for example.\r\nTherefore we need to use `1.8.1.dev0` for example in this case.\r\n\r\nI updated the dev version to use `1.8.1.dev0`, and I also added a comment in the setup.py in the release steps about 
this.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2531\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2531\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2530","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2530\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2530\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2530\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2530","id":927013773,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1MjMyNDk0","number":2530,"title":"Fixed label parsing in the ProductReviews dataset","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-22T09:12:45Z","updated_at":"2021-06-22T12:55:20Z","closed_at":"2021-06-22T12:52:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2530","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2530","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2530.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2530.patch","merged_at":"2021-06-22T12:52:40Z"},"body":"Fixed issue with parsing dataset labels. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2530\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2530\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2529","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2529\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2529\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2529\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2529","id":926378812,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc0NjkxNjA5","number":2529,"title":"Add summarization template","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-21T16:08:31Z","updated_at":"2021-06-23T14:22:11Z","closed_at":"2021-06-23T13:30:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2529","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2529","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2529.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2529.patch","merged_at":"2021-06-23T13:30:10Z"},"body":"This PR adds a task template for text summarization. 
As far as I can tell, we do not need to distinguish between \"extractive\" or \"abstractive\" summarization - both can be handled with this template.\r\n\r\nUsage:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom datasets.tasks import Summarization\r\n\r\nds = load_dataset(\"xsum\", split=\"train\")\r\n# Dataset({\r\n# features: ['document', 'summary', 'id'],\r\n# num_rows: 204045\r\n# })\r\n\r\nsummarization = Summarization(text_column=\"document\", summary_column=\"summary\")\r\nds.prepare_for_task(summarization)\r\n# Dataset({\r\n# features: ['text', 'summary'],\r\n# num_rows: 204045\r\n# })\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2529\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2529\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2528","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2528\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2528\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2528\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2528","id":926314656,"node_id":"MDU6SXNzdWU5MjYzMTQ2NTY=","number":2528,"title":"Logging cannot be set to NOTSET similar to transformers","user":{"login":"joshzwiebel","id":34662010,"node_id":"MDQ6VXNlcjM0NjYyMDEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34662010?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joshzwiebel","html_url":"https:\/\/github.com\/joshzwiebel","followers_url":"https:\/\/api.github.com\/users\/joshzwiebel\/followers","following_url":"https:\/\/api.github.com\/users\/joshzwiebel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joshzwiebel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joshzwiebel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joshzwiebel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joshzwiebel\/orgs","repos_url":"https:\/\/api.github.com\/users\/joshzwiebel\/repos","events_url":"https:\/\/api.github.com\/users\/joshzwiebel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joshzwiebel\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-21T15:04:54Z","updated_at":"2021-06-24T14:42:47Z","closed_at":"2021-06-24T14:42:47Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIn the transformers library you can set the verbosity level to logging.NOTSET to work around the usage of tqdm and IPywidgets, however in Datasets this is no longer possible. 
This is because transformers set the verbosity level of tqdm with [this](https:\/\/github.com\/huggingface\/transformers\/blob\/b53bc55ba9bb10d5ee279eab51a2f0acc5af2a6b\/src\/transformers\/file_utils.py#L1449) \r\n`disable=bool(logging.get_verbosity() == logging.NOTSET)`\r\nand datasets accomplishes this like [so](https:\/\/github.com\/huggingface\/datasets\/blob\/83554e410e1ab8c6f705cfbb2df7953638ad3ac1\/src\/datasets\/utils\/file_utils.py#L493)\r\n`not_verbose = bool(logger.getEffectiveLevel() > WARNING)`\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\nimport logging\r\ndatasets.logging.get_verbosity = lambda : logging.NOTSET\r\ndatasets.load_dataset(\"patrickvonplaten\/librispeech_asr_dummy\")\r\n```\r\n\r\n## Expected results\r\nThe code should download and load the dataset as normal without displaying progress bars\r\n\r\n## Actual results\r\n```ImportError Traceback (most recent call last)\r\n in \r\n----> 1 datasets.load_dataset(\"patrickvonplaten\/librispeech_asr_dummy\")\r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, task, **config_kwargs)\r\n 713 dataset=True,\r\n 714 return_resolved_file_path=True,\r\n--> 715 use_auth_token=use_auth_token,\r\n 716 )\r\n 717 # Set the base path for downloads as the parent of the script location\r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, dynamic_modules_path, return_resolved_file_path, **download_kwargs)\r\n 350 file_path = hf_bucket_url(path, filename=name, dataset=False)\r\n 351 try:\r\n--> 352 local_path = cached_path(file_path, download_config=download_config)\r\n 353 except FileNotFoundError:\r\n 354 raise FileNotFoundError(\r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py in cached_path(url_or_filename, download_config, **download_kwargs)\r\n 289 use_etag=download_config.use_etag,\r\n 290 max_retries=download_config.max_retries,\r\n--> 291 use_auth_token=download_config.use_auth_token,\r\n 292 )\r\n 293 elif os.path.exists(url_or_filename):\r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag, max_retries, use_auth_token)\r\n 668 headers=headers,\r\n 669 cookies=cookies,\r\n--> 670 max_retries=max_retries,\r\n 671 )\r\n 672 \r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py in http_get(url, temp_file, proxies, resume_size, headers, cookies, timeout, max_retries)\r\n 493 initial=resume_size,\r\n 494 desc=\"Downloading\",\r\n--> 495 disable=not_verbose,\r\n 496 )\r\n 497 for chunk in response.iter_content(chunk_size=1024):\r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/tqdm\/notebook.py in __init__(self, *args, **kwargs)\r\n 217 total = self.total * unit_scale if self.total else self.total\r\n 218 self.container = self.status_printer(\r\n--> 219 self.fp, total, self.desc, self.ncols)\r\n 220 self.sp = self.display\r\n 221 \r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/tqdm\/notebook.py in status_printer(_, total, desc, ncols)\r\n 95 if IProgress is None: # #187 #451 #558 #872\r\n 96 raise ImportError(\r\n---> 97 \"IProgress not found. 
Please update jupyter and ipywidgets.\"\r\n 98 \" See https:\/\/ipywidgets.readthedocs.io\/en\/stable\"\r\n 99 \"\/user_install.html\")\r\n\r\nImportError: IProgress not found. Please update jupyter and ipywidgets. See https:\/\/ipywidgets.readthedocs.io\/en\/stable\/user_install.html\r\n```\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-5.4.95-42.163.amzn2.x86_64-x86_64-with-debian-10.8\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\nI am running this code on Deepnote and which important to this issue **does not** support IPywidgets\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2528\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2528\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2527","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2527\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2527\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2527\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2527","id":926031525,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc0MzkzNjQ5","number":2527,"title":"Replace bad `n>1M` size tag","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-21T09:42:35Z","updated_at":"2021-06-21T15:06:50Z","closed_at":"2021-06-21T15:06:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2527","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2527","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2527.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2527.patch","merged_at":"2021-06-21T15:06:49Z"},"body":"Some datasets were still using the old `n>1M` tag which has been replaced with tags 
`1M1M`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2527\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2527\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2526","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2526\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2526\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2526\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2526","id":925929228,"node_id":"MDU6SXNzdWU5MjU5MjkyMjg=","number":2526,"title":"Add COCO datasets","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision 
datasets"}],"state":"open","locked":false,"assignee":{"login":"merveenoyan","id":53175384,"node_id":"MDQ6VXNlcjUzMTc1Mzg0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53175384?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/merveenoyan","html_url":"https:\/\/github.com\/merveenoyan","followers_url":"https:\/\/api.github.com\/users\/merveenoyan\/followers","following_url":"https:\/\/api.github.com\/users\/merveenoyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/merveenoyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/merveenoyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/merveenoyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/merveenoyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/merveenoyan\/repos","events_url":"https:\/\/api.github.com\/users\/merveenoyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/merveenoyan\/received_events","type":"User","site_admin":false},"assignees":[{"login":"merveenoyan","id":53175384,"node_id":"MDQ6VXNlcjUzMTc1Mzg0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53175384?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/merveenoyan","html_url":"https:\/\/github.com\/merveenoyan","followers_url":"https:\/\/api.github.com\/users\/merveenoyan\/followers","following_url":"https:\/\/api.github.com\/users\/merveenoyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/merveenoyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/merveenoyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/merveenoyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/merveenoyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/merveenoyan\/repos","events_url":"https:\/\/api.github.com\/users\/merveenoyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/merveenoyan\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2021-06-21T07:48:32Z","updated_at":"2021-12-20T13:33:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** COCO\r\n- **Description:** COCO is a large-scale object detection, segmentation, and captioning dataset.\r\n- **Paper + website:** https:\/\/cocodataset.org\/#home\r\n- **Data:** https:\/\/cocodataset.org\/#download\r\n- **Motivation:** It would be great to have COCO available in HuggingFace datasets, as we are moving beyond just text. COCO includes multi-modalities (images + text), as well as a huge amount of images annotated with objects, segmentation masks, keypoints etc., on which models like DETR (which I recently added to HuggingFace Transformers) are trained. 
Currently, one needs to download everything from the website and place it in a local folder, but it would be much easier if we can directly access it through the datasets API.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2526\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2526\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2525","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2525\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2525\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2525\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2525","id":925896358,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc0Mjc5MTgy","number":2525,"title":"Use scikit-learn package rather than sklearn in setup.py","user":{"login":"lesteve","id":1680079,"node_id":"MDQ6VXNlcjE2ODAwNzk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1680079?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lesteve","html_url":"https:\/\/github.com\/lesteve","followers_url":"https:\/\/api.github.com\/users\/lesteve\/followers","following_url":"https:\/\/api.github.com\/users\/lesteve\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lesteve\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lesteve\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lesteve\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lesteve\/orgs","repos_url":"https:\/\/api.github.com\/users\/lesteve\/repos","events_url":"https:\/\/api.github.com\/users\/lesteve\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lesteve\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-21T07:04:25Z","updated_at":"2021-06-21T10:01:13Z","closed_at":"2021-06-21T08:57:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2525","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2525","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2525.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2525.patch","merged_at":"2021-06-21T08:57:33Z"},"body":"The sklearn package is an historical thing and should probably not be used by anyone, see https:\/\/github.com\/scikit-learn\/scikit-learn\/issues\/8215#issuecomment-344679114 for some caveats.\r\n\r\nNote: this affects only TESTS_REQUIRE so I guess only developers not end users.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2525\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2525\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2524","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2524\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2524\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2524\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2524","id":925610934,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc0MDQzNzk1","number":2524,"title":"Raise FileNotFoundError in WindowsFileLock","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-20T14:25:11Z","updated_at":"2021-06-28T09:56:22Z","closed_at":"2021-06-28T08:47:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2524","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2524","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2524.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2524.patch","merged_at":"2021-06-28T08:47:39Z"},"body":"Closes #2443 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2524\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2524\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2523","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2523\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2523\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2523\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2523","id":925421008,"node_id":"MDU6SXNzdWU5MjU0MjEwMDg=","number":2523,"title":"Fr","user":{"login":"aDrIaNo34500","id":71971234,"node_id":"MDQ6VXNlcjcxOTcxMjM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/71971234?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aDrIaNo34500","html_url":"https:\/\/github.com\/aDrIaNo34500","followers_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/followers","following_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/orgs","repos_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/repos","events_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-19T15:56:32Z","updated_at":"2021-06-19T18:48:23Z","closed_at":"2021-06-19T18:48:23Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"__Originally posted by @lewtun in https:\/\/github.com\/huggingface\/datasets\/pull\/2469__","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2523\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2523\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2522","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2522\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2522\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2522\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2522","id":925334379,"node_id":"MDU6SXNzdWU5MjUzMzQzNzk=","number":2522,"title":"Documentation Mistakes in Dataset: 
emotion","user":{"login":"GDGauravDutta","id":62606251,"node_id":"MDQ6VXNlcjYyNjA2MjUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/62606251?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/GDGauravDutta","html_url":"https:\/\/github.com\/GDGauravDutta","followers_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/followers","following_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/orgs","repos_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/repos","events_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-19T07:08:57Z","updated_at":"2022-01-25T11:13:59Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As per documentation,\r\nDataset: emotion\r\nHomepage: https:\/\/github.com\/dair-ai\/emotion_dataset\r\n\r\nDataset: https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/emotion\/emotion.py\r\n\r\nPermalink: https:\/\/huggingface.co\/datasets\/viewer\/?dataset=emotion\r\n\r\nEmotion is a dataset of English Twitter messages with eight basic emotions: anger, anticipation, disgust, fear, joy, sadness, surprise, and trust. 
For more detailed information please refer to the paper.\r\n\r\nBut when we view the data, there are only 6 emotions, anger, fear, joy, sadness, surprise, and trust.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2522\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2522\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2521","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2521\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2521\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2521\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2521","id":925030685,"node_id":"MDExOlB1bGxSZXF1ZXN0NjczNTgxNzQ4","number":2521,"title":"Insert text classification template for Emotion dataset","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-18T15:56:19Z","updated_at":"2021-06-21T09:22:31Z","closed_at":"2021-06-21T09:22:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2521","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2521","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2521.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2521.patch","merged_at":"2021-06-21T09:22:31Z"},"body":"This PR includes a template and updated `dataset_infos.json` for the `emotion` dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2521\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2521\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2520","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2520\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2520\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2520\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2520","id":925015004,"node_id":"MDU6SXNzdWU5MjUwMTUwMDQ=","number":2520,"title":"Datasets with tricky task templates","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":2067401494,"node_id":"MDU6TGFiZWwyMDY3NDAxNDk0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Dataset%20discussion","name":"Dataset discussion","color":"72f99f","default":false,"description":"Discussions on the datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-18T15:33:57Z","updated_at":"2021-06-18T15:46:26Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm collecting a list of datasets here that don't follow the \"standard\" taxonomy and require further investigation to implement task templates for.\r\n\r\n## Text classification\r\n\r\n* [hatexplain](https:\/\/huggingface.co\/datasets\/hatexplain): ostensibly a form of text classification, but not in the standard `(text, target)` format and each sample appears to be tokenized.\r\n* [muchocine](https:\/\/huggingface.co\/datasets\/muchocine): contains two candidate text columns (long-form and summary) which in principle requires two `TextClassification` templates which is not currently supported ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2520\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2520\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2519","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2519\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2519\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2519\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2519","id":924903240,"node_id":"MDExOlB1bGxSZXF1ZXN0NjczNDcyMzYy","number":2519,"title":"Improve performance of pandas arrow extractor","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-18T13:24:41Z","updated_at":"2021-06-21T09:06:06Z","closed_at":"2021-06-21T09:06:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2519","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2519","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2519.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2519.patch","merged_at":"2021-06-21T09:06:06Z"},"body":"While reviewing PR #2505, I noticed that pandas arrow extractor could be refactored to be faster.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2519\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2519\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2518","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2518\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2518\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2518\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2518","id":924654100,"node_id":"MDExOlB1bGxSZXF1ZXN0NjczMjU5Nzg1","number":2518,"title":"Add task templates for tydiqa and 
xquad","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-18T08:06:34Z","updated_at":"2021-06-18T15:01:17Z","closed_at":"2021-06-18T14:50:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2518","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2518","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2518.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2518.patch","merged_at":"2021-06-18T14:50:33Z"},"body":"This PR adds question-answering templates to the remaining datasets that are linked to a model on the Hub.\r\n\r\nNotes: \r\n\r\n* I could not test the tydiqa implementation since I don't have enough disk space \ud83d\ude22 . 
But I am confident the template works :)\r\n* there exist other datasets like `fquad` and `mlqa` which are candidates for question-answering templates, but some work is needed to handle the ordering of nested column described in #2434 \r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2518\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2518\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2517","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2517\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2517\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2517\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2517","id":924643345,"node_id":"MDExOlB1bGxSZXF1ZXN0NjczMjUwODk1","number":2517,"title":"Fix typo in MatthewsCorrelation class name","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-18T07:53:06Z","updated_at":"2021-06-18T08:43:55Z","closed_at":"2021-06-18T08:43:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2517","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2517","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2517.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2517.patch","merged_at":"2021-06-18T08:43:55Z"},"body":"Close #2513.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2517\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2517\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2516","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2516\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2516\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2516\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2516","id":924597470,"node_id":"MDU6SXNzdWU5MjQ1OTc0NzA=","number":2516,"title":"datasets.map pickle issue resulting in invalid mapping function","user":{"login":"david-waterworth","id":5028974,"node_id":"MDQ6VXNlcjUwMjg5NzQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5028974?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/david-waterworth","html_url":"https:\/\/github.com\/david-waterworth","followers_url":"https:\/\/api.github.com\/users\/david-waterworth\/followers","following_url":"https:\/\/api.github.com\/users\/david-waterworth\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/david-waterworth\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/david-waterworth\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/david-waterworth\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/david-waterworth\/orgs","repos_url":"https:\/\/api.github.com\/users\/david-waterworth\/repos","events_url":"https:\/\/api.github.com\/users\/david-waterworth\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/david-waterworth\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-06-18T06:47:26Z","updated_at":"2021-06-23T13:47:49Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I trained my own tokenizer, and I needed to use a python custom class. Because of this I have to detach the custom step before saving and reattach after restore. I did this using the standard pickle `__get_state__` \/ `__set_state__` mechanism. I think it's correct but it fails when I use it inside a function which is mapped to a dataset, i.e. 
in the manner of run_mlm.py and other huggingface scripts.\r\n\r\nThe following reproduces the issue - most likely I'm missing something\r\n\r\nA simulated tokeniser which can be pickled\r\n\r\n```\r\nclass CustomTokenizer:\r\n def __init__(self):\r\n self.state = \"init\"\r\n\r\n def __getstate__(self):\r\n print(\"__getstate__ called\")\r\n out = self.__dict__.copy()\r\n self.state = \"pickled\"\r\n return out\r\n \r\n def __setstate__(self, d):\r\n print(\"__setstate__ called\")\r\n self.__dict__ = d\r\n self.state = \"restored\"\r\n\r\ntokenizer = CustomTokenizer()\r\n```\r\n\r\nTest that it actually works - prints \"__getstate__ called\" and \"__setstate__ called\"\r\n```\r\nimport pickle\r\nserialized = pickle.dumps(tokenizer)\r\nrestored = pickle.loads(serialized)\r\nassert restored.state == \"restored\"\r\n```\r\n\r\nSimulate a function that tokenises examples, when dataset.map is called, this function \r\n```\r\ndef tokenize_function(examples):\r\n assert tokenizer.state == \"restored\" # this shouldn't fail but it does\r\n output = tokenizer(examples) # this will fail as tokenizer isn't really a tokenizer\r\n return output\r\n```\r\n\r\nUse map to simulate tokenization\r\n```\r\nimport glob\r\nfrom datasets import load_dataset\r\n\r\nassert tokenizer.state == \"restored\"\r\ntrain_files = glob.glob('train*.csv')\r\nvalidation_files = glob.glob('validation*.csv')\r\ndatasets = load_dataset(\"csv\", data_files=dict(train=train_files, validation=validation_files))\r\n\r\ntokenized_datasets = datasets.map(\r\n tokenize_function,\r\n batched=True,\r\n)\r\n```\r\n\r\nWhat's happening is I can see that __getstate__ is called but not __setstate__, so the state of `tokenize_function` is invalid at the point that it's actually executed. This doesn't matter as far as I can see for the standard tokenizers as they don't use __getstate__ \/ __setstate__. 
I'm not sure if there's another hook I'm supposed to implement as well?\r\n\r\n---------------------------------------------------------------------------\r\nAssertionError Traceback (most recent call last)\r\n in \r\n 8 tokenized_datasets = datasets.map(\r\n 9 tokenize_function,\r\n---> 10 batched=True,\r\n 11 )\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py in map(self, function, with_indices, input_columns, batched, batch_size, remove_columns, keep_in_memory, load_from_cache_file, cache_file_names, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, desc)\r\n 487 desc=desc,\r\n 488 )\r\n--> 489 for k, dataset in self.items()\r\n 490 }\r\n 491 )\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py in (.0)\r\n 487 desc=desc,\r\n 488 )\r\n--> 489 for k, dataset in self.items()\r\n 490 }\r\n 491 )\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint, desc)\r\n 1633 fn_kwargs=fn_kwargs,\r\n 1634 new_fingerprint=new_fingerprint,\r\n-> 1635 desc=desc,\r\n 1636 )\r\n 1637 else:\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 184 }\r\n 185 # apply actual function\r\n--> 186 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 187 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 188 # re-apply format to the output\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 395 # Call actual function\r\n 396 \r\n--> 397 out = func(self, *args, **kwargs)\r\n 398 \r\n 399 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, desc)\r\n 1961 indices,\r\n 1962 check_same_num_examples=len(input_dataset.list_indexes()) > 0,\r\n-> 1963 offset=offset,\r\n 1964 )\r\n 1965 except NumExamplesMismatch:\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in apply_function_on_filtered_inputs(inputs, indices, check_same_num_examples, offset)\r\n 1853 effective_indices = [i + offset for i in indices] if isinstance(indices, list) else indices + offset\r\n 1854 processed_inputs = (\r\n-> 1855 function(*fn_args, effective_indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n 1856 )\r\n 1857 if update_data is None:\r\n\r\n in tokenize_function(examples)\r\n 1 def tokenize_function(examples):\r\n----> 2 assert tokenizer.state == \"restored\"\r\n 3 tokenizer(examples)\r\n 4 return 
examples\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2516\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2516\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2515","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2515\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2515\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2515\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2515","id":924435447,"node_id":"MDExOlB1bGxSZXF1ZXN0NjczMDc3NTIx","number":2515,"title":"CRD3 dataset card","user":{"login":"wilsonyhlee","id":1937386,"node_id":"MDQ6VXNlcjE5MzczODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1937386?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/wilsonyhlee","html_url":"https:\/\/github.com\/wilsonyhlee","followers_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/followers","following_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/orgs","repos_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/repos","events_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-18T00:24:07Z","updated_at":"2021-06-21T10:18:44Z","closed_at":"2021-06-21T10:18:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2515","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2515","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2515.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2515.patch","merged_at":"2021-06-21T10:18:44Z"},"body":"This PR adds additional information to the CRD3 dataset card. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2515\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2515\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2514","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2514\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2514\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2514\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2514","id":924417172,"node_id":"MDU6SXNzdWU5MjQ0MTcxNzI=","number":2514,"title":"Can datasets remove duplicated rows?","user":{"login":"liuxinglan","id":16516583,"node_id":"MDQ6VXNlcjE2NTE2NTgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16516583?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/liuxinglan","html_url":"https:\/\/github.com\/liuxinglan","followers_url":"https:\/\/api.github.com\/users\/liuxinglan\/followers","following_url":"https:\/\/api.github.com\/users\/liuxinglan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/liuxinglan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/liuxinglan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/liuxinglan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/liuxinglan\/orgs","repos_url":"https:\/\/api.github.com\/users\/liuxinglan\/repos","events_url":"https:\/\/api.github.com\/users\/liuxinglan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/liuxinglan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-06-17T23:35:38Z","updated_at":"2021-12-02T08:39:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\ni find myself more and more relying on datasets just to do all the preprocessing. 
One thing however, for removing duplicated rows, I couldn't find out how and am always converting datasets to pandas to do that..\r\n\r\n\r\n**Describe the solution you'd like**\r\nhave a functionality of \" remove duplicated rows\"\r\n\r\n**Describe alternatives you've considered**\r\nconvert dataset to pandas, remove duplicate, and convert back...\r\n\r\n\r\n**Additional context**\r\nno","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2514\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2514\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2513","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2513\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2513\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2513\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2513","id":924174413,"node_id":"MDU6SXNzdWU5MjQxNzQ0MTM=","number":2513,"title":"Corelation should be Correlation","user":{"login":"colbym-MM","id":71514164,"node_id":"MDQ6VXNlcjcxNTE0MTY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/71514164?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/colbym-MM","html_url":"https:\/\/github.com\/colbym-MM","followers_url":"https:\/\/api.github.com\/users\/colbym-MM\/followers","following_url":"https:\/\/api.github.com\/users\/colbym-MM\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/colbym-MM\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/colbym-MM\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/colbym-MM\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/colbym-MM\/orgs","repos_url":"https:\/\/api.github.com\/users\/colbym-MM\/repos","events_url":"https:\/\/api.github.com\/users\/colbym-MM\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/colbym-MM\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githu
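Regarding the deduplication request quoted above (issue 2514): a minimal sketch of the pandas round-trip workaround the author describes, assuming exact-duplicate rows; the column names and toy values are invented for illustration.

```python
from datasets import Dataset

# Toy dataset with one exact duplicate row (column names are made up).
ds = Dataset.from_dict({"text": ["a", "b", "a"], "label": [0, 1, 0]})

# Round-trip through pandas to drop exact duplicates, as described in the request.
df = ds.to_pandas()
deduped = Dataset.from_pandas(df.drop_duplicates(), preserve_index=False)

print(len(ds), len(deduped))  # 3 2
```

The round trip materializes the data in memory, which is presumably why the request asks for a native "remove duplicated rows" operation instead.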
busercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-17T17:28:48Z","updated_at":"2021-06-18T08:43:55Z","closed_at":"2021-06-18T08:43:55Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"https:\/\/github.com\/huggingface\/datasets\/blob\/0e87e1d053220e8ecddfa679bcd89a4c7bc5af62\/metrics\/matthews_correlation\/matthews_correlation.py#L66","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2513\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2513\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2512","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2512\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2512\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2512\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2512","id":924069353,"node_id":"MDU6SXNzdWU5MjQwNjkzNTM=","number":2512,"title":"seqeval metric does not work with a recent version of sklearn: classification_report() got an unexpected keyword argument 'output_dict'","user":{"login":"avidale","id":8642136,"node_id":"MDQ6VXNlcjg2NDIxMzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8642136?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/avidale","html_url":"https:\/\/github.com\/avidale","followers_url":"https:\/\/api.github.com\/users\/avidale\/followers","following_url":"https:\/\/api.github.com\/users\/avidale\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/avidale\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/avidale\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/avidale\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/avidale\/orgs","repos_url":"https:\/\/api.github.com\/users\/avidale\/repos","events_url":"https:\/\/api.github.com\/users\/avidale\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/avidale\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-17T15:36:02Z","updated_at":"2021-06-17T15:46:07Z","closed_at":"2021-06-17T15:46:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset, load_metric\r\nseqeval = load_metric(\"seqeval\")\r\nseqeval.compute(predictions=[['A']], references=[['A']])\r\n```\r\n\r\n## Expected results\r\nThe function computes a dict with metrics\r\n\r\n## Actual results\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n 1 from datasets import load_dataset, load_metric\r\n 2 seqeval = load_metric(\"seqeval\")\r\n----> 3 seqeval.compute(predictions=[['A']], references=[['A']])\r\n\r\n~\/p3\/lib\/python3.7\/site-packages\/datasets\/metric.py in compute(self, *args, **kwargs)\r\n 396 references = self.data[\"references\"]\r\n 397 with temp_seed(self.seed):\r\n--> 398 output = self._compute(predictions=predictions, references=references, **kwargs)\r\n 399 \r\n 400 if self.buf_writer is not None:\r\n\r\n~\/.cache\/huggingface\/modules\/datasets_modules\/metrics\/seqeval\/81eda1ff004361d4fa48754a446ec69bb7aa9cf4d14c7215f407d1475941c5ff\/seqeval.py in _compute(self, predictions, references, suffix)\r\n 95 \r\n 96 def _compute(self, predictions, references, suffix=False):\r\n---> 97 report = classification_report(y_true=references, y_pred=predictions, suffix=suffix, output_dict=True)\r\n 98 report.pop(\"macro avg\")\r\n 99 report.pop(\"weighted avg\")\r\n\r\nTypeError: classification_report() got an unexpected keyword argument 'output_dict'\r\n```\r\n\r\n## Environment info\r\nsklearn=0.24\r\ndatasets=1.1.3\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2512\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2512\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2511","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2511\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2511\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2511\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2511","id":923762133,"node_id":"MDU6SXNzdWU5MjM3NjIxMzM=","number":2511,"title":"Add 
C4","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-06-17T10:31:04Z","updated_at":"2021-07-05T12:36:58Z","closed_at":"2021-07-05T12:36:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *C4*\r\n- **Description:** 
*https:\/\/github.com\/allenai\/allennlp\/discussions\/5056*\r\n- **Paper:** *https:\/\/arxiv.org\/abs\/1910.10683*\r\n- **Data:** *https:\/\/huggingface.co\/datasets\/allenai\/c4*\r\n- **Motivation:** *Used a lot for pretraining*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nShould fix https:\/\/github.com\/huggingface\/datasets\/issues\/1710","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2511\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2511\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2510","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2510\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2510\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2510\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2510","id":923735485,"node_id":"MDExOlB1bGxSZXF1ZXN0NjcyNDY3MzY3","number":2510,"title":"Add align_labels_with_mapping to DatasetDict","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-17T10:03:35Z","updated_at":"2021-06-17T10:45:25Z","closed_at":"2021-06-17T10:45:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2510","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2510","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2510.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2510.patch","merged_at":"2021-06-17T10:45:24Z"},"body":"https:\/\/github.com\/huggingface\/datasets\/pull\/2457 added the `Dataset.align_labels_with_mapping` method.\r\nIn this PR I also added `DatasetDict.align_labels_with_mapping`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2510\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2510\/timeline","performed_via_github_app":null} 
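A hedged usage sketch for the `DatasetDict.align_labels_with_mapping` method added in the PR above, assuming it mirrors the `Dataset` signature (a `label2id` mapping plus the label column name); the label names, texts, and mapping below are invented.

```python
from datasets import ClassLabel, Dataset, DatasetDict, Features, Value

# Toy DatasetDict whose "label" column is a ClassLabel (names are made up).
features = Features({"text": Value("string"), "label": ClassLabel(names=["negative", "positive"])})
dsd = DatasetDict(
    {"train": Dataset.from_dict({"text": ["good", "bad"], "label": [1, 0]}, features=features)}
)

# Re-map the integer ids so every split matches an external label2id convention,
# e.g. one taken from a model config.
label2id = {"negative": 1, "positive": 0}
aligned = dsd.align_labels_with_mapping(label2id, "label")

print(aligned["train"].features["label"].names)
print(aligned["train"]["label"])
```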
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2509","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2509\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2509\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2509\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2509","id":922846035,"node_id":"MDExOlB1bGxSZXF1ZXN0NjcxNjcyMzU5","number":2509,"title":"Fix fingerprint when moving cache dir","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-16T16:45:09Z","updated_at":"2021-06-21T15:05:04Z","closed_at":"2021-06-21T15:05:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2509","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2509","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2509.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2509.patch","merged_at":"2021-06-21T15:05:03Z"},"body":"The fingerprint of a dataset changes if the cache directory is moved.\r\nI fixed that by setting the fingerprint to be the hash of:\r\n- the relative cache dir (dataset_name\/version\/config_id)\r\n- the requested split\r\n\r\nClose #2496 \r\n\r\nI had to fix an issue with the filelock filename that was too long (>255). It prevented the tests to run on my machine. I just added `hash_filename_if_too_long` in case this happens, to not get filenames longer than 255.\r\nWe usually have long filenames for filelocks because they are named after the path that is being locked. 
In case the path is a cache directory that has long directory names, then the filelock filename could end up being very long.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2509\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2509\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2508","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2508\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2508\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2508\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2508","id":921863173,"node_id":"MDU6SXNzdWU5MjE4NjMxNzM=","number":2508,"title":"Load Image Classification Dataset from Local ","user":{"login":"Jacobsolawetz","id":8428198,"node_id":"MDQ6VXNlcjg0MjgxOTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8428198?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Jacobsolawetz","html_url":"https:\/\/github.com\/Jacobsolawetz","followers_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/followers","following_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/orgs","repos_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/repos","events_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"assignees":[{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-06-15T22:43:33Z","updated_at":"2022-02-10T16:57:48Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nYes - we would like to load an image classification dataset with datasets without having to write a custom data loader.\r\n\r\n**Describe the solution you'd like**\r\n\r\nGiven a folder structure with images of each class in each folder, the ability to load these folders into a HuggingFace dataset like \"cifar10\".\r\n\r\n**Describe alternatives you've considered**\r\n\r\nImplement ViT training outside of the HuggingFace Trainer and without datasets (we did this but prefer to stay on the main path)\r\n\r\nWrite custom data loader logic\r\n\r\n**Additional context**\r\n\r\nWe're training ViT on custom dataset\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2508\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2508\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2507","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2507\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2507\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2507\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2507","id":921441962,"node_id":"MDExOlB1bGxSZXF1ZXN0NjcwNDQ0MDgz","number":2507,"title":"Rearrange JSON field names to match passed features schema field names","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor 
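For the image-classification-from-local-folders request above (issue 2508), a sketch of one way to do it with recent `datasets` releases, which ship an `imagefolder` builder that infers the class label from the parent directory name; the directory layout and path below are hypothetical.

```python
from datasets import load_dataset

# Hypothetical class-per-folder layout:
#   my_images/train/cat/001.png
#   my_images/train/dog/002.png
# The "imagefolder" builder turns this layout into a dataset with
# "image" and "label" columns, no custom loading script needed.
dataset = load_dataset("imagefolder", data_dir="my_images")

print(dataset["train"].features)
print(dataset["train"][0]["label"])
```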
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":0,"created_at":"2021-06-15T14:10:02Z","updated_at":"2021-06-16T10:47:49Z","closed_at":"2021-06-16T10:47:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2507","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2507","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2507.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2507.patch","merged_at":"2021-06-16T10:47:49Z"},"body":"This PR depends on PR #2453 (which must be merged first).\r\n\r\nClose #2366.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2507\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2507\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2506","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2506\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2506\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2506\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2506","id":921435598,"node_id":"MDExOlB1bGxSZXF1ZXN0NjcwNDM4NTgx","number":2506,"title":"Add course 
banner","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-15T14:03:54Z","updated_at":"2021-06-15T16:25:36Z","closed_at":"2021-06-15T16:25:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2506","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2506","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2506.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2506.patch","merged_at":"2021-06-15T16:25:35Z"},"body":"This PR adds a course banner similar to the one you can now see in the [Transformers repo](https:\/\/github.com\/huggingface\/transformers) that links to the course. Let me know if placement seems right to you or not, I can move it just below the badges too.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2506\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2506\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2505","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2505\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2505\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2505\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2505","id":921234797,"node_id":"MDExOlB1bGxSZXF1ZXN0NjcwMjY2NjQy","number":2505,"title":"Make numpy arrow extractor 
faster","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-06-15T10:11:32Z","updated_at":"2021-06-28T09:53:39Z","closed_at":"2021-06-28T09:53:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2505","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2505","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2505.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2505.patch","merged_at":"2021-06-28T09:53:38Z"},"body":"I changed the NumpyArrowExtractor to call directly to_numpy and see if it can lead to speed-ups as discussed in https:\/\/github.com\/huggingface\/datasets\/issues\/2498\r\n\r\nThis could make the numpy\/torch\/tf\/jax formatting faster","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2505\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2505\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2503","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2503\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2503\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2503\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2503","id":920636186,"node_id":"MDU6SXNzdWU5MjA2MzYxODY=","number":2503,"title":"SubjQA wrong boolean values in 
entries","user":{"login":"arnaudstiegler","id":26485052,"node_id":"MDQ6VXNlcjI2NDg1MDUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26485052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arnaudstiegler","html_url":"https:\/\/github.com\/arnaudstiegler","followers_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/followers","following_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/orgs","repos_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/repos","events_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-06-14T17:42:46Z","updated_at":"2021-08-25T03:52:06Z","closed_at":
null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nSubjQA seems to have a boolean that's consistently wrong.\r\n\r\nIt defines:\r\n- question_subj_level: The subjectiviy level of the question (on a 1 to 5 scale with 1 being the most subjective).\r\n- is_ques_subjective: A boolean subjectivity label derived from question_subj_level (i.e., scores below 4 are considered as subjective)\r\n\r\nHowever, `is_ques_subjective` seems to have wrong values in the entire dataset.\r\n\r\nFor instance, in the example in the dataset card, we have:\r\n- \"question_subj_level\": 2\r\n- \"is_ques_subjective\": false\r\n\r\nHowever, according to the description, the question should be subjective since the `question_subj_level` is below 4\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2503\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2503\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2502","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2502\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2502\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2502\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2502","id":920623572,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY5NzQ1MDA5","number":2502,"title":"JAX integration","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-14T17:24:23Z","updated_at":"2021-06-21T16:15:50Z","closed_at":"2021-06-21T16:15:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2502","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2502","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2502.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2502.patch","merged_at":"2021-06-21T16:15:48Z"},"body":"Hi !\r\n\r\nI just added the \"jax\" formatting, as we already have for pytorch, tensorflow, numpy (and also pandas and arrow).\r\nIt does pretty much the same thing as the pytorch formatter except it creates 
jax.numpy.ndarray objects.\r\n\r\n```python\r\nfrom datasets import Dataset\r\n\r\nd = Dataset.from_dict({\"foo\": [[0., 1., 2.]]})\r\nd = d.with_format(\"jax\")\r\nd[0]\r\n# {'foo': DeviceArray([0., 1., 2.], dtype=float32)}\r\n```\r\n\r\nA few details:\r\n- The default integer precision for jax depends on the jax configuration `jax_enable_x64` (see [here](https:\/\/jax.readthedocs.io\/en\/latest\/notebooks\/Common_Gotchas_in_JAX.html#double-64bit-precision)), I took that into account. Unless `jax_enable_x64` is specified, it is int32 by default\r\n- AFAIK it's not possible to do a full conversion from arrow data to jax data. We are doing arrow -> numpy -> jax but the numpy -> jax part doesn't do zero copy unfortunately (see [here](https:\/\/github.com\/google\/jax\/issues\/4486))\r\n- the env var for disabling JAX is `USE_JAX`. However I noticed that in `transformers` it is `USE_FLAX`. This is not an issue though IMO\r\n\r\nI also updated `convert_to_python_objects` to allow users to pass jax.numpy.ndarray objects to build a dataset.\r\n\r\nSince the `convert_to_python_objects` method became slow because it's the time when pytorch, tf (and now jax) are imported, I fixed it by checking the `sys.modules` to avoid unnecessary imports of pytorch, tf or jax.\r\n\r\nClose #2495","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2502\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2502\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2501","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2501\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2501\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2501\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2501","id":920579634,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY5NzA3Nzc0","number":2501,"title":"Add Zenodo metadata file with 
license","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":0,"created_at":"2021-06-14T16:28:12Z","updated_at":"2021-06-14T16:49:42Z","closed_at":"2021-06-14T16:49:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2501","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2501","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2501.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2501.patch","merged_at":"2021-06-14T16:49:42Z"},"body":"This Zenodo metadata file fixes the name of the `Datasets` license appearing in the DOI as `\"Apache-2.0\"`, which otherwise by default is `\"other-open\"`.\r\n\r\nClose #2472. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2501\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2501\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2500","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2500\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2500\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2500\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2500","id":920471411,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY5NjE2MjQ1","number":2500,"title":"Add load_dataset_builder","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-06-14T14:27:45Z","updated_at":"2021-07-09T00:08:16Z","closed_at":"2021-07-05T10:45:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2500","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2500","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2500.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2500.patch","merged_at":"2021-07-05T10:45:57Z"},"body":"Adds the `load_dataset_builder` function. 
The good thing is that we can reuse this function to load the dataset info without downloading the dataset itself.\r\n\r\nTODOs:\r\n- [x] Add docstring and entry in the docs\r\n- [x] Add tests\r\n\r\nCloses #2484 \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2500\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2500\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2499","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2499\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2499\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2499\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2499","id":920413021,"node_id":"MDU6SXNzdWU5MjA0MTMwMjE=","number":2499,"title":" Python Programming Puzzles","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-14T13:27:18Z","updated_at":"2021-06-15T18:14:14Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Python Programming Puzzles\r\n- **Description:** Programming challenge called programming puzzles, as an objective and comprehensive evaluation of program synthesis\r\n- **Paper:** https:\/\/arxiv.org\/pdf\/2106.05784.pdf\r\n- **Data:** https:\/\/github.com\/microsoft\/PythonProgrammingPuzzles ([Scrolling through the data](https:\/\/github.com\/microsoft\/PythonProgrammingPuzzles\/blob\/main\/problems\/README.md))\r\n- **Motivation:** Spans a large range of difficulty, problems, and domains. 
A useful resource for evaluation as we don't have a clear understanding of the abilities and skills of extremely large LMs.\r\n\r\nNote: it's a growing dataset (contributions are welcome), so we'll need careful versioning for this dataset.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2499\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2499\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2498","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2498\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2498\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2498\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2498","id":920411285,"node_id":"MDU6SXNzdWU5MjA0MTEyODU=","number":2498,"title":"Improve torch formatting performance","user":{"login":"vblagoje","id":458335,"node_id":"MDQ6VXNlcjQ1ODMzNQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/458335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vblagoje","html_url":"https:\/\/github.com\/vblagoje","followers_url":"https:\/\/api.github.com\/users\/vblagoje\/followers","following_url":"https:\/\/api.github.com\/users\/vblagoje\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vblagoje\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vblagoje\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vblagoje\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vblagoje\/orgs","repos_url":"https:\/\/api.github.com\/users\/vblagoje\/repos","events_url":"https:\/\/api.github.com\/users\/vblagoje\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vblagoje\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":16,"created_at":"2021-06-14T13:25:24Z","updated_at":"2021-06-21T09:54:54Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nIt would be great, if possible, to further improve read performance of raw encoded datasets and their subsequent conversion to torch tensors. \r\n\r\nA bit more background. I am working on LM pre-training using HF ecosystem. We use encoded HF Wikipedia and BookCorpus datasets. The training machines are similar to DGX-1 workstations. We use HF trainer torch.distributed training approach on a single machine with 8 GPUs.\r\n\r\nThe current performance is about 30% slower than NVidia optimized BERT [examples](https:\/\/github.com\/NVIDIA\/DeepLearningExamples\/tree\/master\/PyTorch\/LanguageModeling) baseline. 
Quite a bit of customized code and training loop tricks were used to achieve the baseline performance. It would be great to achieve the same performance while using nothing more than off the shelf HF ecosystem. Perhaps, in the future, with @stas00 work on deepspeed integration, it could even be exceeded. \r\n\r\n**Describe the solution you'd like**\r\nUsing profiling tools we've observed that appx. 25% of cumulative run time is spent on data loader next call.\r\n![dataloader_next](https:\/\/user-images.githubusercontent.com\/458335\/121895543-59742a00-ccee-11eb-85fb-f07715e3f1f6.png)\r\n\r\nAs you can observe most of the data loader next call is spent in HF datasets torch_formatter.py format_batch call. \r\n\r\nDigging a bit deeper into format_batch we can see the following profiler data:\r\n![torch_formatter](https:\/\/user-images.githubusercontent.com\/458335\/121895944-c7b8ec80-ccee-11eb-95d5-5875c5716c30.png)\r\n\r\nOnce again, a lot of time is spent in pyarrow table conversion to pandas which seems like an intermediary step. Offline @lhoestq told me that this approach was, for some unknown reason, faster than direct to numpy conversion. \r\n\r\n**Describe alternatives you've considered**\r\nI am not familiar with pyarrow and have not yet considered the alternatives to the current approach. \r\n\r\nMost of the online advice around data loader performance improvements revolve around increasing number of workers, using pin memory for copying tensors from host device to gpus but we've already tried these avenues without much performance improvement. Weights & Biases dashboard for the pre-training task reports CPU utilization of ~ 10%, GPUs are completely saturated (GPU utilization is above 95% on all GPUs), while disk utilization is above 90%. \r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2498\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2498\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2497","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2497\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2497\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2497\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2497","id":920250382,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY5NDI3OTU3","number":2497,"title":"Use default cast for sliced list arrays if pyarrow >= 
4","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":2,"created_at":"2021-06-14T10:02:47Z","updated_at":"2021-06-15T18:06:18Z","closed_at":"2021-06-14T14:24:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2497","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2497","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2497.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2497.patch","merged_at":"2021-06-14T14:24:37Z"},"body":"From pyarrow version 4, it is supported to cast sliced lists.\r\n\r\nThis PR uses default pyarrow cast in Datasets to cast sliced list arrays if pyarrow version is >= 4.\r\n\r\nIn relation with PR #2461 and #2490.\r\n\r\ncc: @lhoestq, @abhi1thakur, 
@SBrandeis","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2497\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2497\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2496","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2496\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2496\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2496\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2496","id":920216314,"node_id":"MDU6SXNzdWU5MjAyMTYzMTQ=","number":2496,"title":"Dataset fingerprint changes after moving the cache directory, which prevent cache reload when using `map`","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starr
ed{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-14T09:20:26Z","updated_at":"2021-06-21T15:05:03Z","closed_at":"2021-06-21T15:05:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`Dataset.map` uses the dataset fingerprint (a hash) for caching.\r\nHowever the fingerprint seems to change when someone moves the cache directory of the dataset.\r\n\r\nThis is because it uses the default fingerprint generation:\r\n1. the dataset path is used to get the fingerprint\r\n2. the modification times of the arrow file is also used to get the fingerprint\r\n\r\nTo fix that we could set the fingerprint of the dataset to be a hash of (, , , ), i.e. a hash of the the cache path relative to the cache directory.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2496\/reactions","total_count":2,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2496\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2495","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2495\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2495\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2495\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2495","id":920170030,"node_id":"MDU6SXNzdWU5MjAxNzAwMzA=","number":2495,"title":"JAX 
formatting","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-14T08:32:07Z","updated_at":"2021-06-21T16:15:49Z","closed_at":"2021-06-21T16:15:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"We already support pytorch, tensorflow, numpy, pandas and arrow dataset formatting. 
Let's add jax as well","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2495\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2495\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2494","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2494\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2494\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2494\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2494","id":920149183,"node_id":"MDU6SXNzdWU5MjAxNDkxODM=","number":2494,"title":"Improve docs on Enhancing performance","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-14T08:11:48Z","updated_at":"2021-06-14T08:11:48Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In the [\"Enhancing performance\"](https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#enhancing-performance) section of docs, add specific use cases:\r\n- How to make datasets the fastest\r\n- How to make datasets take the less RAM\r\n- How to make datasets take the less hard drive mem\r\n\r\ncc: @thomwolf \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2494\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2494\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2493","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2493\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2493\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2493\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2493","id":919833281,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY5MDc4OTcw","number":2493,"title":"add tensorflow-macos support","user":{"login":"slayerjain","id":12831254,"node_id":"MDQ6VXNlcjEyODMxMjU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12831254?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slayerjain","html_url":"https:\/\/github.com\/slayerjain","followers_url":"https:\/\/api.github.com\/users\/slayerjain\/followers","following_url":"https:\/\/api.github.com\/users\/slayerjain\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slayerjain\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slayerjain\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slayerjain\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slayerjain\/orgs","repos_url":"https:\/\/api.github.com\/users\/slayerjain\/repos","events_url":"https:\/\/api.github.com\/users\/slayerjain\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slayerjain\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-13T16:20:08Z","updated_at":"2021-06-15T08:53:06Z","closed_at":"2021-06-15T08:53:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2493","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2493","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2493.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2493.patch","merged_at":"2021-06-15T08:53:06Z"},"body":"ref - https:\/\/github.com\/huggingface\/datasets\/issues\/2068","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2493\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2493\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2492","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2492\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2492\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2492\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2492","id":919718102,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4OTkxODk4","number":2492,"title":"Eduge","user":{"login":"enod","id":6023883,"node_id":"MDQ6VXNlcjYwMjM4ODM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6023883?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/enod","html_url":"https:\/\/github.com\/enod","followers_url":"https:\/\/api.github.com\/users\/enod\/followers","following_url":"https:\/\/api.github.com\/users\/enod\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/enod\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/enod\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/enod\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/enod\/orgs","repos_url":"https:\/\/api.github.com\/users\/enod\/repos","events_url":"https:\/\/api.github.com\/users\/enod\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/enod\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-13T05:10:59Z","updated_at":"2021-06-22T09:49:04Z","closed_at":"2021-06-16T10:41:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2492","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2492","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2492.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2492.patch","merged_at":"2021-06-16T10:41:46Z"},"body":"Hi, awesome folks behind the huggingface! \r\n\r\nHere is my PR for the text classification dataset in Mongolian.\r\n\r\nPlease do let me know in case you have anything to clarify. 
\r\n\r\nThanks & Regards,\r\nEnod","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2492\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2492\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2491","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2491\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2491\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2491\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2491","id":919714506,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4OTg5MTUw","number":2491,"title":"add eduge classification dataset","user":{"login":"enod","id":6023883,"node_id":"MDQ6VXNlcjYwMjM4ODM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6023883?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/enod","html_url":"https:\/\/github.com\/enod","followers_url":"https:\/\/api.github.com\/users\/enod\/followers","following_url":"https:\/\/api.github.com\/users\/enod\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/enod\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/enod\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/enod\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/enod\/orgs","repos_url":"https:\/\/api.github.com\/users\/enod\/repos","events_url":"https:\/\/api.github.com\/users\/enod\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/enod\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-13T04:37:01Z","updated_at":"2021-06-13T05:06:48Z","closed_at":"2021-06-13T05:06:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2491","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2491","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2491.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2491.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2491\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2491\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2490","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2490\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2490\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2490\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2490","id":919571385,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4ODc4NDA3","number":2490,"title":"Allow latest pyarrow 
version","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":1,"created_at":"2021-06-12T14:17:34Z","updated_at":"2021-07-06T16:54:52Z","closed_at":"2021-06-14T07:53:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2490","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2490","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2490.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2490.patch","merged_at":"2021-06-14T07:53:23Z"},"body":"Allow latest pyarrow version, once that version 4.0.1 fixes the segfault bug introduced in version 4.0.0.\r\n\r\nClose 
#2489.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2490\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2490\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2489","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2489\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2489\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2489\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2489","id":919569749,"node_id":"MDU6SXNzdWU5MTk1Njk3NDk=","number":2489,"title":"Allow latest pyarrow version once segfault bug is fixed","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-12T14:09:52Z","updated_at":"2021-06-14T07:53:23Z","closed_at":"2021-06-14T07:53:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As pointed out by @symeneses (see https:\/\/github.com\/huggingface\/datasets\/pull\/2268#issuecomment-860048613), pyarrow has fixed the segfault bug present in version 4.0.0 (see https:\/\/issues.apache.org\/jira\/browse\/ARROW-12568):\r\n- it was fixed on 3 May 2021\r\n- version 4.0.1 was released on 19 May 2021 with the bug fix","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2489\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2489\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2488","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2488\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2488\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2488\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2488","id":919500756,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4ODIwNDA1","number":2488,"title":"Set configurable downloaded datasets path","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":0,"created_at":"2021-06-12T09:09:03Z","updated_at":"2021-06-14T09:13:27Z","closed_at":"2021-06-14T08:29:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2488","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2488","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2488.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2488.patch","merged_at":"2021-06-14T08:29:07Z"},"body":"Part of #2480.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2488\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2488\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2487","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2487\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2487\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2487\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2487","id":919452407,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4Nzc5Mjk0","number":2487,"title":"Set configurable extracted datasets 
path","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":2,"created_at":"2021-06-12T05:47:29Z","updated_at":"2021-06-14T09:30:17Z","closed_at":"2021-06-14T09:02:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2487","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2487","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2487.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2487.patch","merged_at":"2021-06-14T09:02:56Z"},"body":"Part of 
#2480.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2487\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2487\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2486","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2486\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2486\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2486\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2486","id":919174898,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4NTI2Njg3","number":2486,"title":"Add Rico Dataset","user":{"login":"ncoop57","id":7613470,"node_id":"MDQ6VXNlcjc2MTM0NzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7613470?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ncoop57","html_url":"https:\/\/github.com\/ncoop57","followers_url":"https:\/\/api.github.com\/users\/ncoop57\/followers","following_url":"https:\/\/api.github.com\/users\/ncoop57\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ncoop57\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ncoop57\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ncoop57\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ncoop57\/orgs","repos_url":"https:\/\/api.github.com\/users\/ncoop57\/repos","events_url":"https:\/\/api.github.com\/users\/ncoop57\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ncoop57\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-11T20:17:41Z","updated_at":"2021-09-09T08:29:26Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2486","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2486","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2486.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2486.patch","merged_at":null},"body":"Hi there!\r\n\r\nI'm wanting to add the Rico datasets for software engineering type data to y'alls awesome library. However, as I have started coding, I've ran into a few hiccups so I thought it best to open the PR early to get a bit of discussion on how the Rico datasets should be added to the `datasets` lib.\r\n\r\n1) There are 7 different datasets under Rico and so I was wondering, should I make a folder for each or should I put them as different configurations of a single dataset?\r\nYou can see the datasets available for Rico here: http:\/\/interactionmining.org\/rico\r\n\r\n2) As of right now, I have a semi working version of the first dataset which has pairs of screenshots and hierarchies from android applications. However, these screenshots are very large (1440, 2560, 3) and there are 66,000 of them so I am not able to perform the processing that the `datasets` lib does after downloading and extracting the dataset since I run out of memory very fast. 
Is there a way to have `datasets` lib not put everything into memory while it is processing the dataset?\r\n\r\n2.1) If there is not a way, would it be better to just return the path to the screenshots instead of the actual image?\r\n\r\n3) The hierarchies are JSON objects and looking through the documentation of `datasets`, I didn't see any feature that I could use for this type of data. So, for now I just have it being read in as a string, is this okay or should I be doing it differently?\r\n\r\n4) One of the Rico datasets is a bunch of animations (GIFs), is there a `datasets` feature that I can put this type of data into or should I just return the path as a string?\r\n\r\nI appreciate any and all help I can get for this PR, I think the Rico datasets will be an awesome addition to the library :nerd_face: !","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2486\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2486\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2485","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2485\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2485\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2485\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2485","id":919099218,"node_id":"MDU6SXNzdWU5MTkwOTkyMTg=","number":2485,"title":"Implement layered building","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-11T18:54:25Z","updated_at":"2021-06-11T18:54:25Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As discussed with @stas00 and @lhoestq (see also here https:\/\/github.com\/huggingface\/datasets\/issues\/2481#issuecomment-859712190):\r\n\r\n> My suggestion for this would be to have this enabled by default.\r\n> 
\r\n> Plus I don't know if there should be a dedicated issue to that is another functionality. But I propose layered building rather than all at once. That is:\r\n>\r\n> 1. uncompress a handful of files via a generator enough to generate one arrow file\r\n> 2. process arrow file 1\r\n> 3. delete all the files that went in and aren't needed anymore.\r\n>\r\n> rinse and repeat.\r\n> \r\n> 1. This way much less disc space will be required - e.g. on JZ we won't be running into inode limitation, also it'd help with the collaborative hub training project\r\n> 2. The user doesn't need to go and manually clean up all the huge files that were left after pre-processing\r\n> 3. It would already include deleting temp files this issue is talking about\r\n> \r\n> I wonder if the new streaming API would be of help, except here the streaming would be into arrow files as the destination, rather than dataloaders.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2485\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2485\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2484","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2484\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2484\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2484\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2484","id":919092635,"node_id":"MDU6SXNzdWU5MTkwOTI2MzU=","number":2484,"title":"Implement loading a dataset builder","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-11T18:47:22Z","updated_at":"2021-07-05T10:45:57Z","closed_at":"2021-07-05T10:45:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As discussed with @stas00 and @lhoestq, this would allow things like:\r\n\r\n```python\r\nfrom datasets import load_dataset_builder\r\ndataset_name = \"openwebtext\"\r\nbuilder = load_dataset_builder(dataset_name)\r\nprint(builder.cache_dir)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2484\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2484\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2483","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2483\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2483\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2483\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2483","id":918871712,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4MjU1Mjg1","number":2483,"title":"Use gc.collect only when needed to avoid slow 
downs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-11T15:09:30Z","updated_at":"2021-06-18T19:25:06Z","closed_at":"2021-06-11T15:31:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2483","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2483","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2483.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2483.patch","merged_at":"2021-06-11T15:31:35Z"},"body":"In https:\/\/github.com\/huggingface\/datasets\/commit\/42320a110d9d072703814e1f630a0d90d626a1e6 we added a call to gc.collect to resolve some issues on windows (see https:\/\/github.com\/huggingface\/datasets\/pull\/2482)\r\n\r\nHowever calling gc.collect too often causes significant slow downs (the CI run time doubled).\r\nSo I just moved the gc.collect call to the exact place where it's actually needed: when post-processing a dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2483\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2483\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2482","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2482\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2482\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2482\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2482","id":918846027,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4MjMyMzI5","number":2482,"title":"Allow to use 
tqdm>=4.50.0","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-11T14:49:21Z","updated_at":"2021-06-11T15:11:51Z","closed_at":"2021-06-11T15:11:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2482","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2482","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2482.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2482.patch","merged_at":"2021-06-11T15:11:50Z"},"body":"We used to have permission errors on windows whith the latest versions of tqdm (see [here](https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/6365\/workflows\/24f7c960-3176-43a5-9652-7830a23a981e\/jobs\/39232))\r\n\r\nThey were due to open arrow files not properly closed by pyarrow.\r\nSince https:\/\/github.com\/huggingface\/datasets\/commit\/42320a110d9d072703814e1f630a0d90d626a1e6 gc.collect is called each time we don't need an arrow file to make sure that the files are closed.\r\n\r\nclose https:\/\/github.com\/huggingface\/datasets\/issues\/2471\r\n\r\ncc @lewtun ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2482\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2482\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2481","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2481\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2481\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2481\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2481","id":918680168,"node_id":"MDU6SXNzdWU5MTg2ODAxNjg=","number":2481,"title":"Delete extracted files to save disk 
space","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6
","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":1,"created_at":"2021-06-11T12:21:52Z","updated_at":"2021-07-19T09:08:18Z","closed_at":"2021-07-19T09:08:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As discussed with @stas00 and @lhoestq, allowing the deletion of extracted files would save a great amount of disk space to typical user.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2481\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2481\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2480","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2480\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2480\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2480\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2480","id":918678578,"node_id":"MDU6SXNzdWU5MTg2Nzg1Nzg=","number":2480,"title":"Set download\/extracted paths 
configurable","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-11T12:20:24Z","updated_at":"2021
-06-15T14:23:49Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As discussed with @stas00 and @lhoestq, setting these paths configurable may allow to overcome disk space limitation on different partitions\/drives.\r\n\r\nTODO:\r\n- [x] Set configurable extracted datasets path: #2487\r\n- [x] Set configurable downloaded datasets path: #2488\r\n- [ ] Set configurable \"incomplete\" datasets path?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2480\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2480\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2479","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2479\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2479\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2479\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2479","id":918672431,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4MDc3NTI4","number":2479,"title":"\u274c load_datasets \u274c","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-11T12:14:36Z","updated_at":"2021-06-11T14:46:25Z","closed_at":"2021-06-11T14:46:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2479","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2479","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2479.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2479.patch","merged_at":"2021-06-11T14:46:24Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2479\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2479\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2478","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2478\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2478\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2478\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2478","id":918507510,"node_id":"MDU6SXNzdWU5MTg1MDc1MTA=","number":2478,"title":"Create release script","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-11T09:38:02Z","updated_at":"2021-06-11T09:38:02Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Create a script so that releases can be done automatically (as done in `transformers`).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2478\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2478\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2477","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2477\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2477\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2477\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2477","id":918334431,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY3NzczMTY0","number":2477,"title":"Fix docs custom stable 
version","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":4,"created_at":"2021-06-11T07:26:03Z","updated_at":"2021-06-14T09:14:20Z","closed_at":"2021-06-14T08:20:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2477","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2477","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2477.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2477.patch","merged_at":"2021-06-14T08:20:18Z"},"body":"Currently docs default version is 1.5.0. 
This PR fixes this and sets the latest version instead.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2477\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2477\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2476","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2476\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2476\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2476\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2476","id":917686662,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY3MTg3OTk1","number":2476,"title":"Add TimeDial","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-10T18:33:07Z","updated_at":"2021-07-30T12:57:54Z","closed_at":"2021-07-30T12:57:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2476","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2476","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2476.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2476.patch","merged_at":"2021-07-30T12:57:54Z"},"body":"Dataset: https:\/\/github.com\/google-research-datasets\/TimeDial\r\n\r\nTo-Do: Update README.md and add YAML tags","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2476\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2476\/timeline","performed_via_github_app":null} 
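Once a dataset script such as the TimeDial one above is merged, it becomes loadable by name. A minimal sketch, assuming the script is published under the name `time_dial` (the actual identifier is whatever the merged script defines):

```python
from datasets import load_dataset

# Assumption: the PR's script lands under the name "time_dial".
timedial = load_dataset("time_dial")

# Inspect whatever splits the script defines without assuming their names.
for split_name, split in timedial.items():
    print(split_name, split.num_rows)
```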
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2475","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2475\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2475\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2475\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2475","id":917650882,"node_id":"MDU6SXNzdWU5MTc2NTA4ODI=","number":2475,"title":"Issue in timit_asr database","user":{"login":"hrahamim","id":85702107,"node_id":"MDQ6VXNlcjg1NzAyMTA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/85702107?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hrahamim","html_url":"https:\/\/github.com\/hrahamim","followers_url":"https:\/\/api.github.com\/users\/hrahamim\/followers","following_url":"https:\/\/api.github.com\/users\/hrahamim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hrahamim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hrahamim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hrahamim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hrahamim\/orgs","repos_url":"https:\/\/api.github.com\/users\/hrahamim\/repos","events_url":"https:\/\/api.github.com\/users\/hrahamim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hrahamim\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-10T18:05:29Z","updated_at":"2021-06-13T08:13:50Z","closed_at":"2021-06-13T08:13:13Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to load the timit_asr dataset however only the first record is shown (duplicated over all the rows).\r\nI am using the next code line\r\ndataset = load_dataset(\u201ctimit_asr\u201d, split=\u201ctest\u201d).shuffle().select(range(10))\r\n\r\nThe above code result with the same sentence duplicated ten times.\r\nIt also happens when I use the dataset viewer at Streamlit .\r\n\r\n## Steps to reproduce the bug\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\u201ctimit_asr\u201d, split=\u201ctest\u201d).shuffle().select(range(10))\r\ndata = dataset.to_pandas()\r\n\r\n# Sample code to reproduce the bug\r\n```\r\n\r\n## Expected results\r\ntable with different row information\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n\r\n- `datasets` version: 1.4.1 (also occur in the latest version)\r\n- Platform: Linux-4.15.0-143-generic-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.6.9\r\n- PyTorch version (GPU?): 1.8.1+cu102 (False)\r\n- Tensorflow version (GPU?): 1.15.3 (False)\r\n- Using GPU in script?: No\r\n- Using distributed or parallel set-up in script?: No\r\n\r\n- `datasets` version:\r\n- Platform:\r\n- Python version:\r\n- PyArrow 
version:\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2475\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2475\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2474","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2474\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2474\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2474\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2474","id":917622055,"node_id":"MDU6SXNzdWU5MTc2MjIwNTU=","number":2474,"title":"cache_dir parameter for load_from_disk ?","user":{"login":"TaskManager91","id":7063207,"node_id":"MDQ6VXNlcjcwNjMyMDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7063207?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TaskManager91","html_url":"https:\/\/github.com\/TaskManager91","followers_url":"https:\/\/api.github.com\/users\/TaskManager91\/followers","following_url":"https:\/\/api.github.com\/users\/TaskManager91\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TaskManager91\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TaskManager91\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TaskManager91\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TaskManager91\/orgs","repos_url":"https:\/\/api.github.com\/users\/TaskManager91\/repos","events_url":"https:\/\/api.github.com\/users\/TaskManager91\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TaskManager91\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-06-10T17:39:36Z","updated_at":"2021-06-14T08:41:09Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nWhen using Google Colab big datasets can be an issue, as they won't fit on the VM's disk. Therefore mounting google drive could be a possible solution. Unfortunatly when loading my own dataset by using the _load_from_disk_ function, the data gets cached to the VM's disk:\r\n\r\n`\r\nfrom datasets import load_from_disk\r\n\r\nmyPreprocessedData = load_from_disk(\"\/content\/gdrive\/MyDrive\/ASR_data\/myPreprocessedData\")\r\n\r\n`\r\nI know that chaching on google drive could slow down learning. But at least it would run.\r\n\r\n**Describe the solution you'd like**\r\nAdd cache_Dir parameter to the load_from_disk function.\r\n\r\n**Describe alternatives you've considered**\r\nIt looks like you could write a custom loading script for the load_dataset function. But this seems to be much too complex for my use case. 
Is there perhaps a template here that uses the load_from_disk function?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2474\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2474\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2473","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2473\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2473\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2473\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2473","id":917538629,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY3MDU5MjI5","number":2473,"title":"Add Disfl-QA","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-10T16:18:00Z","updated_at":"2021-07-29T11:56:19Z","closed_at":"2021-07-29T11:56:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2473","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2473","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2473.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2473.patch","merged_at":"2021-07-29T11:56:18Z"},"body":"Dataset: https:\/\/github.com\/google-research-datasets\/disfl-qa\r\n\r\nTo-Do: Update README.md and add YAML tags","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2473\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2473\/timeline","performed_via_github_app":null} 
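For the `load_from_disk` feature request above (issue 2474), here is a small sketch contrasting the current call with the shape of the proposed `cache_dir` parameter; the parameter is only what the issue asks for, not an existing argument of `load_from_disk`.

```python
from datasets import load_from_disk

# Current usage from the issue body: load a dataset previously written with
# save_to_disk() directly from the mounted Google Drive path.
ds = load_from_disk("/content/gdrive/MyDrive/ASR_data/myPreprocessedData")

# Proposed (hypothetical) shape from issue 2474 -- `cache_dir` does not exist
# on load_from_disk here and is shown only to illustrate the request:
# ds = load_from_disk(
#     "/content/gdrive/MyDrive/ASR_data/myPreprocessedData",
#     cache_dir="/content/local_cache",
# )
```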
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2472","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2472\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2472\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2472\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2472","id":917463821,"node_id":"MDU6SXNzdWU5MTc0NjM4MjE=","number":2472,"title":"Fix automatic generation of Zenodo DOI","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":4,"created_at":"2021-06-10T15:15:46Z","updated_at":"2021-06-14T16:49:42Z","closed_at":"2021-06-14T16:49:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"After the last release of Datasets (1.8.0), the automatic generation of the Zenodo DOI failed: it appears in yellow as \"Received\", instead of in green as \"Published\".\r\n\r\nI have contacted Zenodo support to fix this issue.\r\n\r\nTODO:\r\n- [x] Check with Zenodo to fix the issue\r\n- [x] Check BibTeX entry is right","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2472\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2472\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2471","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2471\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2471\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2471\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2471","id":917067165,"node_id":"MDU6SXNzdWU5MTcwNjcxNjU=","number":2471,"title":"Fix PermissionError on Windows when using tqdm 
>=4.50.0","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":0,"created_at":"2021-06-10T08:31:49Z","updated_at":"2021-06-11T15:11:50Z","closed_at":"2021-06-11T15:11:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"See: https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/235\/workflows\/cfb6a39f-68eb-4802-8b17-2cd5e8ea7369\/jobs\/1111\r\n\r\n```\r\nPermissionError: [WinError 32] The process cannot access the file because it is being used by another 
process\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2471\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2471\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2470","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2470\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2470\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2470\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2470","id":916724260,"node_id":"MDU6SXNzdWU5MTY3MjQyNjA=","number":2470,"title":"Crash when `num_proc` > dataset length for `map()` on a `datasets.Dataset`.","user":{"login":"mbforbes","id":1170062,"node_id":"MDQ6VXNlcjExNzAwNjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1170062?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mbforbes","html_url":"https:\/\/github.com\/mbforbes","followers_url":"https:\/\/api.github.com\/users\/mbforbes\/followers","following_url":"https:\/\/api.github.com\/users\/mbforbes\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mbforbes\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mbforbes\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mbforbes\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mbforbes\/orgs","repos_url":"https:\/\/api.github.com\/users\/mbforbes\/repos","events_url":"https:\/\/api.github.com\/users\/mbforbes\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mbforbes\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-06-09T22:40:22Z","updated_at":"2021-07-01T09:34:54Z","closed_at":"2021-07-01T09:11:13Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nCrash if when using `num_proc` > 1 (I used 16) for `map()` on a `datasets.Dataset`.\r\n\r\nI believe I've had cases where `num_proc` > 1 works before, but now it seems either inconsistent, or depends on my data. I'm not sure whether the issue is on my end, because it's difficult for me to debug! 
Any tips greatly appreciated, I'm happy to provide more info if it would helps us diagnose.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# this function will be applied with map()\r\ndef tokenize_function(examples):\r\n return tokenizer(\r\n examples[\"text\"],\r\n padding=PaddingStrategy.DO_NOT_PAD,\r\n truncation=True,\r\n )\r\n\r\n# data_files is a Dict[str, str] mapping name -> path\r\ndatasets = load_dataset(\"text\", data_files={...}) \r\n\r\n# this is where the error happens if num_proc = 16,\r\n# but is fine if num_proc = 1\r\ntokenized_datasets = datasets.map(\r\n tokenize_function,\r\n batched=True,\r\n num_proc=num_workers,\r\n)\r\n```\r\n\r\n## Expected results\r\nThe `map()` function succeeds with `num_proc` > 1.\r\n\r\n## Actual results\r\n![image](https:\/\/user-images.githubusercontent.com\/1170062\/121404271-a6cc5200-c910-11eb-8e27-5c893bd04042.png)\r\n![image](https:\/\/user-images.githubusercontent.com\/1170062\/121404362-be0b3f80-c910-11eb-9117-658943029aef.png)\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.2\r\n- Platform: Linux-5.4.0-73-generic-x86_64-with-glibc2.31\r\n- Python version: 3.9.5\r\n- PyTorch version (GPU?): 1.8.1+cu111 (True)\r\n- Tensorflow version (GPU?): not installed (NA)\r\n- Using GPU in script?: Yes, but I think N\/A for this issue\r\n- Using distributed or parallel set-up in script?: Multi-GPU on one machine, but I think also N\/A for this issue\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2470\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2470\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2469","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2469\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2469\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2469\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2469","id":916440418,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY2MTA1OTk1","number":2469,"title":"Bump tqdm 
version","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-09T17:24:40Z","updated_at":"2021-06-11T15:03:42Z","closed_at":"2021-06-11T15:03:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2469","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2469","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2469.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2469.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2469\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2469\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2468","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2468\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2468\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2468\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2468","id":916427320,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY2MDk0ODI5","number":2468,"title":"Implement ClassLabel encoding in JSON 
loader","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":1,"created_at":"2021-06-09T17:08:54Z","updated_at":"2021-06-28T15:39:54Z","closed_at":"2021-06-28T15:05:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2468","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2468","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2468.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2468.patch","merged_at":"2021-06-28T15:05:34Z"},"body":"Close 
#2365.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2468\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2468\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2466","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2466\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2466\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2466\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2466","id":915914098,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1NjY1MjQy","number":2466,"title":"change udpos features structure","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-09T08:03:31Z","updated_at":"2021-06-18T11:55:09Z","closed_at":"2021-06-16T10:41:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2466","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2466","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2466.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2466.patch","merged_at":"2021-06-16T10:41:37Z"},"body":"The structure is change such that each example is a sentence\r\nThe change is done for issues:\r\n#2061 \r\n#2444 \r\n\r\nClose #2061 , close #2444.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2466\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2466\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2465","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2465\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2465\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2465\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2465","id":915525071,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1MzMxMDMz","number":2465,"title":"adding masahaner dataset","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-06-08T21:20:25Z","updated_at":"2021-06-14T14:59:05Z","closed_at":"2021-06-14T14:59:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2465","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2465","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2465.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2465.patch","merged_at":"2021-06-14T14:59:05Z"},"body":"Adding Masakhane dataset https:\/\/github.com\/masakhane-io\/masakhane-ner \r\n\r\n@lhoestq , can you please review","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2465\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2465\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2464","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2464\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2464\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2464\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2464","id":915485601,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1Mjk1MDE5","number":2464,"title":"fix: adjusting indexing for the 
labels.","user":{"login":"drugilsberg","id":5406908,"node_id":"MDQ6VXNlcjU0MDY5MDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5406908?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/drugilsberg","html_url":"https:\/\/github.com\/drugilsberg","followers_url":"https:\/\/api.github.com\/users\/drugilsberg\/followers","following_url":"https:\/\/api.github.com\/users\/drugilsberg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/drugilsberg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/drugilsberg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/drugilsberg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/drugilsberg\/orgs","repos_url":"https:\/\/api.github.com\/users\/drugilsberg\/repos","events_url":"https:\/\/api.github.com\/users\/drugilsberg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/drugilsberg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-08T20:47:25Z","updated_at":"2021-06-09T10:15:46Z","closed_at":"2021-06-09T09:10:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2464","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2464","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2464.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2464.patch","merged_at":"2021-06-09T09:10:28Z"},"body":"The labels index were mismatching the actual ones used in the dataset. Specifically `0` is used for `SUPPORTS` and `1` is used for `REFUTES`\r\nAfter this change, the `README.md` now reflects the content of `dataset_infos.json`.\r\n\r\nSigned-off-by: Matteo Manica ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2464\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2464\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2463","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2463\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2463\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2463\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2463","id":915454788,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1MjY3NTA2","number":2463,"title":"Fix proto_qa download 
link","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-08T20:23:16Z","updated_at":"2021-06-10T12:49:56Z","closed_at":"2021-06-10T08:31:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2463","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2463","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2463.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2463.patch","merged_at":"2021-06-10T08:31:09Z"},"body":"Fixes #2459 \r\n\r\nInstead of updating the path, this PR fixes a commit hash as suggested by @lhoestq.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2463\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2463\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2462","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2462\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2462\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2462\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2462","id":915384613,"node_id":"MDU6SXNzdWU5MTUzODQ2MTM=","number":2462,"title":"Merge DatasetDict and 
Dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/8","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8\/labels","id":6968069,"node_id":"MI_kwDODunzps4AalMF","number":8,"title":"1.12","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":4,"closed_issues":2,"state":"open","created_at":"2021-07-21T15:34:56Z","updated_at":"2021-10-13T10:26:33Z","due_on":"2021-08-30T07:00:00Z","closed_at":null},"comments":0,"created_at":"2021-06-08T19:22:04Z","updated_at":"2021-09-02T05:33:32Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As discussed in #2424 and #2437 (please see there for detailed conversation):\r\n- It would be desirable to improve UX with respect 
to the confusion between DatasetDict and Dataset.\r\n- The difference between Dataset and DatasetDict is an additional abstraction complexity that confuses \"typical\" end users. \r\n- A user expects a \"Dataset\" (whether it contains multiple splits or a single one), and it may be worth simplifying the user-facing API as much as possible to hide this complexity from the end user.\r\n\r\nHere is a proposal for discussion, to be refined (and potentially abandoned if it's not good enough):\r\n- let's consider that a DatasetDict is also a Dataset, with the various splits concatenated one after the other\r\n- let's disallow the use of integers in split names (probably not a very big breaking change)\r\n- when you index with integers you access the examples progressively, one split after the other (in a deterministic order)\r\n- when you index with strings\/split names you keep the same behavior as now (full backward compat)\r\n- let's then also have all the methods of a Dataset on the DatasetDict\r\n\r\nThe end goal would be to merge the Dataset and DatasetDict objects into a single object that would be (pretty much totally) backward compatible with both.\r\n\r\n\r\nThere are a few things that we could discuss if we want to merge Dataset and DatasetDict:\r\n\r\n1. What happens if you index by a string? Does it return the column or the split? We could disallow conflicts between column names and split names to avoid ambiguities. It can be surprising to be able to get a column or a split using the same indexing feature\r\n ```\r\n from datasets import load_dataset\r\n\r\n dataset = load_dataset(...)\r\n dataset[\"train\"]\r\n dataset[\"input_ids\"]\r\n ```\r\n2. What happens when you iterate over the object? I guess it should iterate over the examples as a Dataset object, but a DatasetDict used to iterate over the splits, as they are the dictionary keys. This is a breaking change that we can discuss.\r\n\r\nMoreover, regarding your points:\r\n\r\n- integers are not allowed as split names already\r\n- it's definitely doable to have all the methods. 
Maybe some of them like train_test_split that is currently only available for Dataset can be tweaked to work for a split dataset\r\n\r\n\r\ncc: @thomwolf @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2462\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2462\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2461","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2461\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2461\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2461\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2461","id":915286150,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1MTE3MTY4","number":2461,"title":"Support sliced list arrays in cast","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-08T17:38:47Z","updated_at":"2021-06-08T17:56:24Z","closed_at":"2021-06-08T17:56:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2461","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2461","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2461.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2461.patch","merged_at":"2021-06-08T17:56:23Z"},"body":"There is this issue in pyarrow:\r\n```python\r\nimport pyarrow as pa\r\n\r\narr = pa.array([[i * 10] for i in range(4)])\r\narr.cast(pa.list_(pa.int32())) # works\r\n\r\narr = arr.slice(1)\r\narr.cast(pa.list_(pa.int32())) # fails\r\n# ArrowNotImplementedError(\"Casting sliced lists (non-zero offset) not yet implemented\")\r\n```\r\n\r\nHowever in `Dataset.cast` we slice tables to cast their types (it's memory intensive), so we have the same issue.\r\nBecause of this it is currently not possible to cast a Dataset with a Sequence feature type (unless the table is small enough to not be sliced).\r\n\r\nIn this PR I fixed this by resetting the offset of `pyarrow.ListArray` arrays to zero in the table before casting.\r\nI used `pyarrow.compute.subtract` function to update the offsets of the ListArray.\r\n\r\ncc @abhi1thakur 
@SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2461\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2461\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2460","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2460\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2460\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2460\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2460","id":915268536,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1MTAyMjA4","number":2460,"title":"Revert default in-memory for small datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/4","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/4","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/4\/labels","id":6680642,"node_id":"MDk6TWlsZXN0b25lNjY4MDY0Mg==","number":4,"title":"1.8","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":2,"state":"closed","created_at":"2021-04-20T16:49:16Z","updated_at":"2021-06-08T18:51:37Z","due_on":"2021-06-08T07:00:00Z","closed_at":"2021-06-08T18:51:04Z"},"comments":1,"created_at":"2021-06-08T17:14:23Z","updated_at":"2021-06-08T18:04:14Z","closed_at":"2021-06-08T17:55:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2460","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2460","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2460.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2460.patch","merged_at":"2021-06-08T17:55:43Z"},"body":"Close #2458","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2460\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2460\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2459","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2459\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2459\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2459\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2459","id":915222015,"node_id":"MDU6SXNzdWU5MTUyMjIwMTU=","number":2459,"title":"`Proto_qa` hosting seems to be 
broken","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-08T16:16:32Z","updated_at":"2021-06-10T08:31:09Z","closed_at":"2021-06-10T08:31:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe hosting (on Github) of the `proto_qa` dataset seems broken. I haven't investigated more yet, just flagging it for now. \r\n\r\n@zaidalyafeai if you want to dive into it, I think it's just a matter of changing the links in `proto_qa.py`\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"proto_qa\")\r\n```\r\n\r\n## Actual results\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 751, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 630, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/hf\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/proto_qa\/445346efaad5c5f200ecda4aa7f0fb50ff1b55edde3003be424a2112c3e8102e\/proto_qa.py\", line 131, in _split_generators\r\n train_fpath = dl_manager.download(_URLs[self.config.name][\"train\"])\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 199, in download\r\n num_proc=download_config.num_proc,\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 195, in map_nested\r\n return function(data_struct)\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 218, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File 
\"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 291, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 621, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/iesl\/protoqa-data\/master\/data\/train\/protoqa_train.jsonl\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2459\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2459\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2458","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2458\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2458\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2458\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2458","id":915199693,"node_id":"MDU6SXNzdWU5MTUxOTk2OTM=","number":2458,"title":"Revert default in-memory for small datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/4","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/4","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/4\/labels","id":6680642,"node_id":"MDk6TWlsZXN0b25lNjY4MDY0Mg==","number":4,"title":"1.8","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":2,"state":"closed","created_at":"2021-04-20T16:49:16Z","updated_at":"2021-06-08T18:51:37Z","due_on":"2021-06-08T07:00:00Z","closed_at":"2021-06-08T18:51:04Z"},"comments":1,"created_at":"2021-06-08T15:51:41Z","updated_at":"2021-06-08T18:57:11Z","closed_at":"2021-06-08T17:55:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Users are reporting issues and confusion about setting default in-memory to True for small datasets.\r\n\r\nWe see 2 clear use cases of Datasets:\r\n- the \"canonical\" way, where you can work with very large datasets, as they are memory-mapped and cached (after every transformation)\r\n- some edge cases (speed benchmarks, interactive\/exploratory analysis,...), where default in-memory can explicitly be enabled, and no caching will be done\r\n\r\nAfter discussing with @lhoestq we have agreed to:\r\n- revert this feature (implemented in #2182)\r\n- explain in the docs how to optimize speed\/performance by setting default in-memory\r\n\r\ncc: @stas00 https:\/\/github.com\/huggingface\/datasets\/pull\/2409#issuecomment-856210552","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2458\/reactions","total_count":2,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2458\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2457","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2457\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2457\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2457\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2457","id":915079441,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY0OTQwMzQ0","number":2457,"title":"Add align_labels_with_mapping 
function","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-06-08T13:54:00Z","updated_at":"2022-01-12T08:57:41Z","closed_at":"2021-06-17T09:56:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2457","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2457","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2457.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2457.patch","merged_at":"2021-06-17T09:56:52Z"},"body":"This PR adds a helper function to align the `label2id` mapping between a `datasets.Dataset` and a classifier (e.g. a transformer with a `PretrainedConfig.label2id` dict), with the alignment performed on the dataset itself.\r\n\r\nThis will help us with the Hub evaluation, where we won't know in advance whether a model that is fine-tuned on say MNLI has the same mappings as the MNLI dataset we load from `datasets`.\r\n\r\nAn example where this is needed is if we naively try to evaluate `microsoft\/deberta-base-mnli` on `mnli` because the model config has the following mappings:\r\n\r\n```python\r\n \"id2label\": {\r\n \"0\": \"CONTRADICTION\",\r\n \"1\": \"NEUTRAL\",\r\n \"2\": \"ENTAILMENT\"\r\n },\r\n \"label2id\": {\r\n \"CONTRADICTION\": 0,\r\n \"ENTAILMENT\": 2,\r\n \"NEUTRAL\": 1\r\n }\r\n```\r\n\r\nwhile the `mnli` dataset has the `contradiction` and `neutral` labels swapped:\r\n\r\n```python\r\nid2label = {0: 'entailment', 1: 'neutral', 2: 'contradiction'}\r\nlabel2id = {'contradiction': 2, 'entailment': 0, 'neutral': 1}\r\n```\r\n\r\nAs a result, we get a much lower accuracy during evaluation:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom transformers.trainer_utils import EvalPrediction\r\nfrom transformers import AutoModelForSequenceClassification, Trainer\r\n\r\n# load dataset for evaluation\r\nmnli = load_dataset(\"glue\", \"mnli\", split=\"test\")\r\n# load model\r\nmodel_ckpt = \"microsoft\/deberta-base-mnli\"\r\nmodel = AutoModelForSequenceClassification.from_pretrained(checkpoint)\r\n# preprocess, create trainer ...\r\nmnli_enc = ...\r\ntrainer = Trainer(model, args=args, tokenizer=tokenizer)\r\n# generate preds\r\npreds = trainer.predict(mnli_enc)\r\n# preds.label_ids misalinged with model.config => returns wrong accuracy (too low)!\r\ncompute_metrics(EvalPrediction(preds.predictions, preds.label_ids))\r\n```\r\n\r\nThe fix is to use the helper function before running the evaluation to make 
sure the label IDs are aligned:\r\n\r\n```python\r\nmnli_enc_aligned = mnli_enc.align_labels_with_mapping(label2id=config.label2id, label_column=\"label\")\r\n# preds now aligned and everyone is happy :)\r\npreds = trainer.predict(mnli_enc_aligned)\r\n```\r\n\r\ncc @thomwolf @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2457\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2457\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2456","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2456\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2456\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2456\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2456","id":914709293,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY0NjAwOTk1","number":2456,"title":"Fix cross-reference typos in documentation","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-08T09:45:14Z","updated_at":"2021-06-08T17:41:37Z","closed_at":"2021-06-08T17:41:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2456","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2456","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2456.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2456.patch","merged_at":"2021-06-08T17:41:36Z"},"body":"Fix some minor typos in docs that avoid the creation of cross-reference links.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2456\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2456\/timeline","performed_via_github_app":null} 
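Editor's note: as a companion to PR #2457 above, here is a rough sketch (not the library's implementation) of the remapping that `align_labels_with_mapping` performs, assuming the dataset's label column is a `ClassLabel` and `label2id` comes from the model config; the function name and lowercased name comparison are illustrative assumptions based on the MNLI example.

```python
# Conceptual sketch only: remap a ClassLabel column so its integer ids match a
# model's label2id mapping (the actual helper presumably also keeps the
# ClassLabel feature itself consistent with the new ordering).
def align_labels(dataset, label2id, label_column="label"):
    names = dataset.features[label_column].names           # e.g. ["entailment", "neutral", "contradiction"]
    old_id_to_name = {i: n.lower() for i, n in enumerate(names)}
    target = {n.lower(): i for n, i in label2id.items()}    # e.g. {"contradiction": 0, ...}

    def remap(example):
        # replace the dataset's id with the id the model expects for that class name
        example[label_column] = target[old_id_to_name[example[label_column]]]
        return example

    return dataset.map(remap)

# usage (hypothetical): mnli_enc_aligned = align_labels(mnli_enc, config.label2id)
```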
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2455","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2455\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2455\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2455\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2455","id":914177468,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY0MTEzNjg2","number":2455,"title":"Update version in xor_tydi_qa.py","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-08T02:23:45Z","updated_at":"2021-06-14T15:35:25Z","closed_at":"2021-06-14T15:35:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2455","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2455","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2455.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2455.patch","merged_at":"2021-06-14T15:35:25Z"},"body":"Fix #2449\r\n\r\n@lhoestq Should I revert to the old `dummy\/1.0.0` or delete it and keep only `dummy\/1.1.0`?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2455\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2455\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2454","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2454\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2454\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2454\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2454","id":913883631,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYzODUyODU1","number":2454,"title":"Rename config and environment variable for in memory max 
size","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-07T19:21:08Z","updated_at":"2021-06-07T20:43:46Z","closed_at":"2021-06-07T20:43:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2454","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2454","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2454.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2454.patch","merged_at":"2021-06-07T20:43:46Z"},"body":"As discussed in #2409, both config and environment variable have been renamed.\r\n\r\ncc: @stas00, huggingface\/transformers#12056","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2454\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2454\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2453","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2453\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2453\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2453\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2453","id":913729258,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYzNzE3NTk2","number":2453,"title":"Keep original features 
order","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":5,"created_at":"2021-06-07T16:26:38Z","updated_at":"2021-06-15T18:05:36Z","closed_at":"2021-06-15T15:43:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2453","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2453","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2453.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2453.patch","merged_at":"2021-06-15T15:43:48Z"},"body":"When loading a Dataset from a JSON file whose column names are not sorted alphabetically, we should get the same column name order, whether we pass features (in the same order as in the file) or not.\r\n\r\nI found this issue while working on 
#2366.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2453\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2453\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2452","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2452\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2452\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2452\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2452","id":913603877,"node_id":"MDU6SXNzdWU5MTM2MDM4Nzc=","number":2452,"title":"MRPC test set differences between torch and tensorflow datasets","user":{"login":"FredericOdermatt","id":50372080,"node_id":"MDQ6VXNlcjUwMzcyMDgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50372080?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/FredericOdermatt","html_url":"https:\/\/github.com\/FredericOdermatt","followers_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/followers","following_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/orgs","repos_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/repos","events_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-07T14:20:26Z","updated_at":"2021-06-07T14:34:32Z","closed_at":"2021-06-07T14:34:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen using `load_dataset(\"glue\", \"mrpc\")` to load the MRPC dataset, the test set includes the labels. When using `tensorflow_datasets.load('glue\/{}'.format('mrpc'))` to load the dataset the test set does not contain the labels. 
There should be consistency between torch and tensorflow ways of importing the GLUE datasets.\r\n\r\n## Steps to reproduce the bug\r\n\r\nMinimal working code \r\n```python\r\nfrom datasets import load_dataset\r\nimport tensorflow as tf\r\nimport tensorflow_datasets\r\n\r\n# torch\r\ndataset = load_dataset(\"glue\", \"mrpc\")\r\n# tf\r\ndata = tensorflow_datasets.load('glue\/{}'.format('mrpc'))\r\ndata = list(data['test'].as_numpy_iterator())\r\nfor i in range(40,50):\r\n tf_sentence1 = data[i]['sentence1'].decode(\"utf-8\") \r\n tf_sentence2 = data[i]['sentence2'].decode(\"utf-8\") \r\n\r\n tf_label = data[i]['label']\r\n \r\n index = data[i]['idx']\r\n print('Index {}'.format(index))\r\n torch_sentence1 = dataset['test']['sentence1'][index]\r\n torch_sentence2 = dataset['test']['sentence2'][index]\r\n\r\n torch_label = dataset['test']['label'][index]\r\n print('Tensorflow: \\n\\tSentence1 {}\\n\\tSentence2 {}\\n\\tLabel {}'.format(tf_sentence1, tf_sentence2, tf_label))\r\n print('Torch: \\n\\tSentence1 {}\\n\\tSentence2 {}\\n\\tLabel {}'.format(torch_sentence1, torch_sentence2, torch_label))\r\n```\r\n\r\nSample output \r\n```\r\nIndex 954\r\nTensorflow: \r\n\tSentence1 Sabri Yakou , an Iraqi native who is a legal U.S. resident , appeared before a federal magistrate yesterday on charges of violating U.S. arms-control laws .\r\n\tSentence2 The elder Yakou , an Iraqi native who is a legal U.S. resident , appeared before a federal magistrate Wednesday on charges of violating U.S. arms control laws .\r\n\tLabel -1\r\nTorch: \r\n\tSentence1 Sabri Yakou , an Iraqi native who is a legal U.S. resident , appeared before a federal magistrate yesterday on charges of violating U.S. arms-control laws .\r\n\tSentence2 The elder Yakou , an Iraqi native who is a legal U.S. resident , appeared before a federal magistrate Wednesday on charges of violating U.S. arms control laws .\r\n\tLabel 1\r\nIndex 711\r\nTensorflow: \r\n\tSentence1 Others keep records sealed for as little as five years or as much as 30 .\r\n\tSentence2 Some states make them available immediately ; others keep them sealed for as much as 30 years .\r\n\tLabel -1\r\nTorch: \r\n\tSentence1 Others keep records sealed for as little as five years or as much as 30 .\r\n\tSentence2 Some states make them available immediately ; others keep them sealed for as much as 30 years .\r\n\tLabel 0\r\n```\r\n\r\n## Expected results\r\nI would expect the datasets to be independent of whether I am working with torch or tensorflow.\r\n\r\n## Actual results\r\nTest set labels are provided in the `datasets.load_datasets()` for MRPC. 
However MRPC is the only task where the test set labels are not -1.\r\n\r\n## Environment info\r\n- `datasets` version: 1.7.0\r\n- Platform: Linux-5.4.109+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2452\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2452\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2451","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2451\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2451\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2451\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2451","id":913263340,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYzMzIwNDY1","number":2451,"title":"Mention that there are no answers in adversarial_qa test set","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-07T08:13:57Z","updated_at":"2021-06-07T08:34:14Z","closed_at":"2021-06-07T08:34:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2451","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2451","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2451.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2451.patch","merged_at":"2021-06-07T08:34:13Z"},"body":"As mention in issue https:\/\/github.com\/huggingface\/datasets\/issues\/2447, there are no answers in the test set","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2451\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2451\/timeline","performed_via_github_app":null} 
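Issues #2452 and #2451 above both come down to evaluation splits that silently lack gold labels or answers. A small, illustrative sanity check (not code from either issue) that can be run before evaluating, assuming the GLUE/MRPC setup discussed in #2452:

```python
# Illustrative sanity check, assuming the GLUE/MRPC setup from #2452:
# unlabeled test examples are conventionally marked with label == -1.
from datasets import load_dataset

mrpc_test = load_dataset("glue", "mrpc", split="test")
n_unlabeled = sum(1 for label in mrpc_test["label"] if label == -1)
print(f"{n_unlabeled} of {len(mrpc_test)} MRPC test examples are unlabeled")
```

Per #2452, the `datasets` copy of MRPC is the one GLUE task whose test split does carry real labels, so the count above is expected to be 0 there, while other GLUE test splits report every example as unlabeled.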
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2450","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2450\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2450\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2450\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2450","id":912890291,"node_id":"MDU6SXNzdWU5MTI4OTAyOTE=","number":2450,"title":"BLUE file not found","user":{"login":"mirfan899","id":3822565,"node_id":"MDQ6VXNlcjM4MjI1NjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3822565?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mirfan899","html_url":"https:\/\/github.com\/mirfan899","followers_url":"https:\/\/api.github.com\/users\/mirfan899\/followers","following_url":"https:\/\/api.github.com\/users\/mirfan899\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mirfan899\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mirfan899\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mirfan899\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mirfan899\/orgs","repos_url":"https:\/\/api.github.com\/users\/mirfan899\/repos","events_url":"https:\/\/api.github.com\/users\/mirfan899\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mirfan899\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-06T17:01:54Z","updated_at":"2021-06-07T10:46:15Z","closed_at":"2021-06-07T10:46:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I'm having the following issue when I try to load the `blue` metric.\r\n\r\n```shell\r\nimport datasets\r\nmetric = datasets.load_metric('blue')\r\nTraceback (most recent call last):\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 320, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 291, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 621, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.7.0\/metrics\/blue\/blue.py\r\nDuring handling of the above exception, another exception occurred:\r\nTraceback (most recent call last):\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 332, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 291, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 621, in 
get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/metrics\/blue\/blue.py\r\nDuring handling of the above exception, another exception occurred:\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 605, in load_metric\r\n dataset=False,\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 343, in prepare_module\r\n combined_path, github_file_path\r\nFileNotFoundError: Couldn't find file locally at blue\/blue.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.7.0\/metrics\/blue\/blue.py.\r\nThe file is also not present on the master branch on github.\r\n```\r\nHere is dataset installed version info\r\n```shell\r\npip freeze | grep datasets\r\ndatasets==1.7.0\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2450\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2450\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2449","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2449\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2449\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2449\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2449","id":912751752,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYyODg1ODUz","number":2449,"title":"Update `xor_tydi_qa` url to 
v1.1","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-06-06T09:44:58Z","updated_at":"2021-06-07T15:16:21Z","closed_at":"2021-06-07T08:31:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2449","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2449","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2449.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2449.patch","merged_at":"2021-06-07T08:31:03Z"},"body":"The dataset is updated and the old url no longer works. So I updated it.\r\n\r\nI faced a bug while trying to fix this. Documenting the solution here. Maybe we can add it to the doc (`CONTRIBUTING.md` and `ADD_NEW_DATASET.md`).\r\n> And to make the command work without the ExpectedMoreDownloadedFiles error, you just need to use the --ignore_verifications flag.\r\nhttps:\/\/github.com\/huggingface\/datasets\/issues\/2076#issuecomment-803904366","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2449\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2449\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2448","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2448\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2448\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2448\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2448","id":912360109,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYyNTI2NjA3","number":2448,"title":"Fix flores download 
link","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-05T17:30:24Z","updated_at":"2021-06-08T20:02:58Z","closed_at":"2021-06-07T08:18:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2448","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2448","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2448.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2448.patch","merged_at":"2021-06-07T08:18:25Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2448\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2448\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2447","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2447\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2447\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2447\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2447","id":912299527,"node_id":"MDU6SXNzdWU5MTIyOTk1Mjc=","number":2447,"title":"dataset adversarial_qa has no answers in the \"test\" 
set","user":{"login":"bjascob","id":22728060,"node_id":"MDQ6VXNlcjIyNzI4MDYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22728060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bjascob","html_url":"https:\/\/github.com\/bjascob","followers_url":"https:\/\/api.github.com\/users\/bjascob\/followers","following_url":"https:\/\/api.github.com\/users\/bjascob\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bjascob\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bjascob\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bjascob\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bjascob\/orgs","repos_url":"https:\/\/api.github.com\/users\/bjascob\/repos","events_url":"https:\/\/api.github.com\/users\/bjascob\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bjascob\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-05T14:57:38Z","updated_at":"2021-06-07T11:13:07Z","closed_at":"2021-06-07T11:13:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen loading the adversarial_qa dataset the 'test' portion has no answers. Only the 'train' and 'validation' portions do. This occurs with all four of the configs ('adversarialQA', 'dbidaf', 'dbert', 'droberta')\r\n\r\n## Steps to reproduce the bug\r\n```\r\nfrom datasets import load_dataset\r\nexamples = load_dataset('adversarial_qa', 'adversarialQA', script_version=\"master\")['test']\r\nprint('Loaded {:,} examples'.format(len(examples)))\r\nhas_answers = 0\r\nfor e in examples:\r\n if e['answers']['text']:\r\n has_answers += 1\r\nprint('{:,} have answers'.format(has_answers))\r\n>>> Loaded 3,000 examples\r\n>>> 0 have answers\r\n\r\nexamples = load_dataset('adversarial_qa', 'adversarialQA', script_version=\"master\")['validation']\r\n<...code above...>\r\n>>> Loaded 3,000 examples\r\n>>> 3,000 have answers\r\n```\r\n\r\n## Expected results\r\nIf 'test' is a valid dataset, it should have answers. 
Also note that all of the 'train' and 'validation' sets have answers, there are no \"no answer\" questions with this set (not sure if this is correct or not).\r\n\r\n## Environment info\r\n- `datasets` version: 1.7.0\r\n- Platform: Linux-5.8.0-53-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.5\r\n- PyArrow version: 1.0.0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2447\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2447\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2446","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2446\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2446\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2446\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2446","id":911635399,"node_id":"MDU6SXNzdWU5MTE2MzUzOTk=","number":2446,"title":"`yelp_polarity` is broken","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-04T15:44:29Z","updated_at":"2021-06-04T18:56:47Z","closed_at":"2021-06-04T18:56:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"![image](https:\/\/user-images.githubusercontent.com\/22514219\/120828150-c4a35b00-c58e-11eb-8083-a537cee4dbb3.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2446\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2446\/timeline","performed_via_github_app":null} 
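On issue #2450 further up (the `FileNotFoundError` for `metrics/blue/blue.py`): the metric script in this repository is published under the name `bleu`, so the likely culprit is the spelling rather than a missing file. A minimal sketch, assuming the legacy `datasets.load_metric` API that the report uses:

```python
# Minimal sketch, assuming the legacy datasets.load_metric API used in #2450
# (later deprecated in favour of the separate `evaluate` library).
import datasets

bleu = datasets.load_metric("bleu")  # note the spelling: "bleu", not "blue"
predictions = [["hello", "there", "general", "kenobi"]]    # one tokenized hypothesis
references = [[["hello", "there", "general", "kenobi"]]]   # list of tokenized references per hypothesis
print(bleu.compute(predictions=predictions, references=references)["bleu"])
```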
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2445","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2445\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2445\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2445\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2445","id":911577578,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYxODMzMTky","number":2445,"title":"Fix broken URLs for bn_hate_speech and covid_tweets_japanese","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-04T14:53:35Z","updated_at":"2021-06-04T17:39:46Z","closed_at":"2021-06-04T17:39:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2445","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2445","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2445.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2445.patch","merged_at":"2021-06-04T17:39:45Z"},"body":"Closes #2388 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2445\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2445\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2444","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2444\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2444\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2444\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2444","id":911297139,"node_id":"MDU6SXNzdWU5MTEyOTcxMzk=","number":2444,"title":"Sentence Boundaries missing in Dataset: xtreme \/ 
udpos","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-04T09:10:26Z","updated_at":"2021-06-18T11:53:43Z","closed_at":"2021-06-18T11:53:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I was browsing through annotation guidelines, as suggested by the datasets introduction.\r\n\r\nThe guidlines saids \"There must be exactly one blank line after every sentence, including the last sentence in the file. Empty sentences are not allowed.\" in the [Sentence Boundaries and Comments section](https:\/\/universaldependencies.org\/format.html#sentence-boundaries-and-comments)\r\n\r\nBut the sentence boundaries seems not to be represented by huggingface datasets features well. 
I found out that multiple sentence are concatenated together as a 1D array, without any delimiter.\r\n\r\nPAN-x, which is another token classification subset from xtreme do represent the sentence boundary using a 2D array.\r\n\r\nYou may compare in PAN-x.en and udpos.English in the explorer:\r\n https:\/\/huggingface.co\/datasets\/viewer\/?dataset=xtreme","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2444\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2444\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2443","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2443\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2443\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2443\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2443","id":909983574,"node_id":"MDU6SXNzdWU5MDk5ODM1NzQ=","number":2443,"title":"Some tests hang on Windows","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-06-03T00:27:30Z","updated_at":"2021-06-28T08:47:39Z","closed_at":"2021-06-28T08:47:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, several tests hang on Windows if the max path limit of 260 characters is not disabled. This happens due to the changes introduced by #2223 that cause an infinite loop in `WindowsFileLock` described in #2220. This can be very tricky to debug, so I think now is a good time to address these issues\/PRs. 
IMO throwing an error is too harsh, but maybe we can emit a warning in the top-level `__init__.py ` on startup if long paths are not enabled.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2443\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2443\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2442","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2442\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2442\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2442\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2442","id":909677029,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYwMjE1ODY1","number":2442,"title":"add english language tags for ~100 datasets","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-02T16:24:56Z","updated_at":"2021-06-04T09:51:40Z","closed_at":"2021-06-04T09:51:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2442","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2442","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2442.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2442.patch","merged_at":"2021-06-04T09:51:39Z"},"body":"As discussed on Slack, I have manually checked for ~100 datasets that they have at least one subset in English. 
This information was missing so adding into the READMEs.\r\n\r\nNote that I didn't check all the subsets so it's possible that some of the datasets have subsets in other languages than English...","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2442\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2442\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2441","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2441\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2441\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2441\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2441","id":908554713,"node_id":"MDU6SXNzdWU5MDg1NTQ3MTM=","number":2441,"title":"DuplicatedKeysError on personal dataset","user":{"login":"lucaguarro","id":22605313,"node_id":"MDQ6VXNlcjIyNjA1MzEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22605313?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucaguarro","html_url":"https:\/\/github.com\/lucaguarro","followers_url":"https:\/\/api.github.com\/users\/lucaguarro\/followers","following_url":"https:\/\/api.github.com\/users\/lucaguarro\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucaguarro\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucaguarro\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucaguarro\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucaguarro\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucaguarro\/repos","events_url":"https:\/\/api.github.com\/users\/lucaguarro\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucaguarro\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-01T17:59:41Z","updated_at":"2021-06-04T23:50:03Z","closed_at":"2021-06-04T23:50:03Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nEver since today, I have been getting a DuplicatedKeysError while trying to load my dataset from my own script.\r\nError returned when running this line: `dataset = load_dataset('\/content\/drive\/MyDrive\/Thesis\/Datasets\/book_preprocessing\/goodreads_maharjan_trimmed_and_nered\/goodreadsnered.py')`\r\nNote that my script was working fine with earlier versions of the Datasets library. Cannot say with 100% certainty if I have been doing something wrong with my dataset script this whole time or if this is simply a bug with the new version of datasets.\r\n\r\n## Steps to reproduce the bug\r\nI cannot provide code to reproduce the error as I am working with my own dataset. 
I can however provide my script if requested.\r\n\r\n## Expected results\r\nFor my data to be loaded.\r\n\r\n## Actual results\r\n**DuplicatedKeysError** exception is raised\r\n```\r\nDownloading and preparing dataset good_reads_practice_dataset\/main_domain (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/root\/.cache\/huggingface\/datasets\/good_reads_practice_dataset\/main_domain\/1.1.0\/64ff7c3fee2693afdddea75002eb6887d4fedc3d812ae3622128c8504ab21655...\r\n\r\n---------------------------------------------------------------------------\r\n\r\nDuplicatedKeysError Traceback (most recent call last)\r\n\r\n in ()\r\n----> 1 dataset = load_dataset('\/content\/drive\/MyDrive\/Thesis\/Datasets\/book_preprocessing\/goodreads_maharjan_trimmed_and_nered\/goodreadsnered.py')\r\n\r\n5 frames\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, task, **config_kwargs)\r\n 749 try_from_hf_gcs=try_from_hf_gcs,\r\n 750 base_path=base_path,\r\n--> 751 use_auth_token=use_auth_token,\r\n 752 )\r\n 753 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n 573 if not downloaded_from_gcs:\r\n 574 self._download_and_prepare(\r\n--> 575 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 576 )\r\n 577 # Sync info\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 650 try:\r\n 651 # Prepare split will record examples associated to the split\r\n--> 652 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n 653 except OSError as e:\r\n 654 raise OSError(\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in _prepare_split(self, split_generator)\r\n 990 writer.write(example, key)\r\n 991 finally:\r\n--> 992 num_examples, num_bytes = writer.finalize()\r\n 993 \r\n 994 split_generator.split_info.num_examples = num_examples\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_writer.py in finalize(self, close_stream)\r\n 407 # In case current_examples < writer_batch_size, but user uses finalize()\r\n 408 if self._check_duplicates:\r\n--> 409 self.check_duplicate_keys()\r\n 410 # Re-intializing to empty list for next batch\r\n 411 self.hkey_record = []\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_writer.py in check_duplicate_keys(self)\r\n 347 for hash, key in self.hkey_record:\r\n 348 if hash in tmp_record:\r\n--> 349 raise DuplicatedKeysError(key)\r\n 350 else:\r\n 351 tmp_record.add(hash)\r\n\r\nDuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 0\r\nKeys should be unique and deterministic in nature\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.7.0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.7.9\r\n- PyArrow version: 
3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2441\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2441\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2440","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2440\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2440\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2440\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2440","id":908521954,"node_id":"MDU6SXNzdWU5MDg1MjE5NTQ=","number":2440,"title":"Remove `extended` field from dataset tagger","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-01T17:18:42Z","updated_at":"2021-06-09T09:06:31Z","closed_at":"2021-06-09T09:06:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhile working on #2435 I used the [dataset tagger](https:\/\/huggingface.co\/datasets\/tagging\/) to generate the missing tags for the YAML metadata of each README.md file. 
However, it seems that our CI raises an error when the `extended` field is included:\r\n\r\n```\r\ndataset_name = 'arcd'\r\n\r\n @pytest.mark.parametrize(\"dataset_name\", get_changed_datasets(repo_path))\r\n def test_changed_dataset_card(dataset_name):\r\n card_path = repo_path \/ \"datasets\" \/ dataset_name \/ \"README.md\"\r\n assert card_path.exists()\r\n error_messages = []\r\n try:\r\n ReadMe.from_readme(card_path)\r\n except Exception as readme_error:\r\n error_messages.append(f\"The following issues have been found in the dataset cards:\\nREADME:\\n{readme_error}\")\r\n try:\r\n DatasetMetadata.from_readme(card_path)\r\n except Exception as metadata_error:\r\n error_messages.append(\r\n f\"The following issues have been found in the dataset cards:\\nYAML tags:\\n{metadata_error}\"\r\n )\r\n \r\n if error_messages:\r\n> raise ValueError(\"\\n\".join(error_messages))\r\nE ValueError: The following issues have been found in the dataset cards:\r\nE YAML tags:\r\nE __init__() got an unexpected keyword argument 'extended'\r\n\r\ntests\/test_dataset_cards.py:70: ValueError\r\n```\r\n\r\nConsider either removing this tag from the tagger or including it as part of the validation step in the CI.\r\n\r\ncc @yjernite ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2440\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2440\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2439","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2439\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2439\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2439\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2439","id":908511983,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU5MTkzMDE3","number":2439,"title":"Better error message when trying to access elements of a DatasetDict without specifying the 
split","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-01T17:04:32Z","updated_at":"2021-06-15T16:03:23Z","closed_at":"2021-06-07T08:54:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2439","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2439","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2439.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2439.patch","merged_at":"2021-06-07T08:54:35Z"},"body":"As mentioned in #2437 it'd be nice to to have an indication to the users when they try to access an element of a DatasetDict without specifying the split name.\r\n\r\ncc @thomwolf ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2439\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2439\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2438","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2438\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2438\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2438\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2438","id":908461914,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU5MTQ5Njg0","number":2438,"title":"Fix NQ features loading: reorder fields of features to match nested fields order in arrow 
data","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-01T16:09:30Z","updated_at":"2021-06-04T09:02:31Z","closed_at":"2021-06-04T09:02:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2438","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2438","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2438.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2438.patch","merged_at":"2021-06-04T09:02:30Z"},"body":"As mentioned in #2401, there is an issue when loading the features of `natural_questions` since the order of the nested fields in the features don't match. The order is important since it matters for the underlying arrow schema.\r\n\r\nTo fix that I re-order the features based on the arrow schema:\r\n\r\n```python\r\ninferred_features = Features.from_arrow_schema(arrow_table.schema)\r\nself.info.features = self.info.features.reorder_fields_as(inferred_features)\r\nassert self.info.features.type == inferred_features.type\r\n```\r\n\r\nThe re-ordering is a recursive function. 
It takes into account that the `Sequence` feature type is a struct of list and not a list of struct.\r\n\r\nNow it's possible to load `natural_questions` again :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2438\/reactions","total_count":2,"+1":1,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2438\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2437","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2437\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2437\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2437\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2437","id":908108882,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4ODUwNTkw","number":2437,"title":"Better error message when using the wrong load_from_disk","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-06-01T09:43:22Z","updated_at":"2021-06-08T18:03:50Z","closed_at":"2021-06-08T18:03:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2437","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2437","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2437.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2437.patch","merged_at":"2021-06-08T18:03:49Z"},"body":"As mentioned in #2424, the error message when one tries to use `Dataset.load_from_disk` to load a DatasetDict object (or _vice versa_) can be improved. 
I added a suggestion in the error message to let users know that they should use the other one.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2437\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2437\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2436","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2436\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2436\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2436\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2436","id":908100211,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4ODQzMzQy","number":2436,"title":"Update DatasetMetadata and ReadMe","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-01T09:32:37Z","updated_at":"2021-06-14T13:23:27Z","closed_at":"2021-06-14T13:23:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2436","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2436","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2436.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2436.patch","merged_at":"2021-06-14T13:23:26Z"},"body":"This PR contains the changes discussed in #2395.\r\n\r\n**Edit**:\r\nIn addition to those changes, I'll be updating the `ReadMe` as follows:\r\n\r\nCurrently, `Section` has separate parsing and validation error lists. In `.validate()`, we add these lists to the final lists and throw errors.\r\n\r\nOne way to make `ReadMe` consistent with `DatasetMetadata` and add a separate `.validate()` method is to throw separate parsing and validation errors. \r\n\r\nThis way, we don't have to throw validation errors, but only parsing errors in `__init__ ()`. We can have an option in `__init__()` to suppress parsing errors so that an object is created for validation. Doing this will allow the user to get all the errors in one go.\r\n\r\nIn `test_dataset_cards` , we are already catching error messages and appending to a list. 
This can be done for `ReadMe()` for parsing errors, and `ReadMe(...,suppress_errors=True); readme.validate()` for validation, separately.\r\n\r\n**Edit 2**:\r\nThe only parsing issue we have as of now is multiple headings at the same level with the same name. I assume this will happen very rarely, but it is still better to throw an error than silently pick one of them. It should be okay to separate it this way. \r\n\r\nWdyt @lhoestq ?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2436\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2436\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2435","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2435\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2435\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2435\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2435","id":907505531,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MzQzNDE2","number":2435,"title":"Insert Extractive QA templates for SQuAD-like datasets","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-31T14:09:11Z","updated_at":"2021-06-03T14:34:30Z","closed_at":"2021-06-03T14:32:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2435","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2435","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2435.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2435.patch","merged_at":"2021-06-03T14:32:27Z"},"body":"This PR adds task templates for 9 SQuAD-like templates with the following properties:\r\n\r\n* 1 config\r\n* A schema that matches the `squad` one (i.e. same column names, especially for the nested `answers` column because the current implementation does not support casting with mismatched columns. see #2434)\r\n* Less than 20GB (my laptop can't handle more right now)\r\n\r\nThe aim of this PR is to provide a few datasets to experiment with the task template integration in other libraries \/ services. 
\r\n\r\nPR #2429 should be merged before this one.\r\n\r\ncc @abhi1thakur ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2435\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2435\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2434","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2434\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2434\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2434\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2434","id":907503557,"node_id":"MDU6SXNzdWU5MDc1MDM1NTc=","number":2434,"title":"Extend QuestionAnsweringExtractive template to handle nested columns","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-31T14:06:51Z","updated_at":"2021-06-17T08:21:30Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently the `QuestionAnsweringExtractive` task template and `preprare_for_task` only support \"flat\" features. 
We should extend the functionality to cover QA datasets like:\r\n\r\n* `iapp_wiki_qa_squad`\r\n* `parsinlu_reading_comprehension`\r\n\r\nwhere the nested features differ with those from `squad` and trigger an `ArrowNotImplementedError`:\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nArrowNotImplementedError Traceback (most recent call last)\r\n in \r\n----> 1 ds.prepare_for_task(\"question-answering-extractive\")[0]\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in prepare_for_task(self, task)\r\n 1436 # We found a template so now flush `DatasetInfo` to skip the template update in `DatasetInfo.__post_init__`\r\n 1437 dataset.info.task_templates = None\r\n-> 1438 dataset = dataset.cast(features=template.features)\r\n 1439 return dataset\r\n 1440 \r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in cast(self, features, batch_size, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, num_proc)\r\n 977 format = self.format\r\n 978 dataset = self.with_format(\"arrow\")\r\n--> 979 dataset = dataset.map(\r\n 980 lambda t: t.cast(schema),\r\n 981 batched=True,\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint, desc)\r\n 1600 \r\n 1601 if num_proc is None or num_proc == 1:\r\n-> 1602 return self._map_single(\r\n 1603 function=function,\r\n 1604 with_indices=with_indices,\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 176 }\r\n 177 # apply actual function\r\n--> 178 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 179 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 180 # re-apply format to the output\r\n\r\n~\/git\/datasets\/src\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 395 # Call actual function\r\n 396 \r\n--> 397 out = func(self, *args, **kwargs)\r\n 398 \r\n 399 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, desc)\r\n 1940 ) # Something simpler?\r\n 1941 try:\r\n-> 1942 batch = apply_function_on_filtered_inputs(\r\n 1943 batch,\r\n 1944 indices,\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in apply_function_on_filtered_inputs(inputs, indices, check_same_num_examples, offset)\r\n 1836 effective_indices = [i + offset for i in indices] if isinstance(indices, list) else indices + offset\r\n 1837 processed_inputs = (\r\n-> 1838 function(*fn_args, effective_indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n 1839 )\r\n 1840 if update_data is None:\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in (t)\r\n 978 dataset = self.with_format(\"arrow\")\r\n 979 dataset = dataset.map(\r\n--> 980 lambda t: t.cast(schema),\r\n 981 batched=True,\r\n 982 batch_size=batch_size,\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in 
pyarrow.lib.Table.cast()\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.ChunkedArray.cast()\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/compute.py in cast(arr, target_type, safe)\r\n 241 else:\r\n 242 options = CastOptions.unsafe(target_type)\r\n--> 243 return call_function(\"cast\", [arr], options)\r\n 244 \r\n 245 \r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/_compute.pyx in pyarrow._compute.call_function()\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/_compute.pyx in pyarrow._compute.Function.call()\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowNotImplementedError: Unsupported cast from struct, answer_start: list, text: list> to struct using function cast_struct\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2434\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2434\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2433","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2433\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2433\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2433\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2433","id":907488711,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MzI5MDQ4","number":2433,"title":"Fix DuplicatedKeysError in 
adversarial_qa","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-31T13:48:47Z","updated_at":"2021-06-01T08:52:11Z","closed_at":"2021-06-01T08:52:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2433","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2433","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2433.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2433.patch","merged_at":"2021-06-01T08:52:10Z"},"body":"Fixes #2431","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2433\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2433\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2432","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2432\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2432\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2432\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2432","id":907462881,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MzA3MTE1","number":2432,"title":"Fix CI six installation on 
linux","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-31T13:15:36Z","updated_at":"2021-05-31T13:17:07Z","closed_at":"2021-05-31T13:17:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2432","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2432","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2432.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2432.patch","merged_at":"2021-05-31T13:17:06Z"},"body":"For some reason we end up with this error in the linux CI when running pip install .[tests]\r\n```\r\npip._vendor.resolvelib.resolvers.InconsistentCandidate: Provided candidate AlreadyInstalledCandidate(six 1.16.0 (\/usr\/local\/lib\/python3.6\/site-packages)) does not satisfy SpecifierRequirement('six>1.9'), SpecifierRequirement('six>1.9'), SpecifierRequirement('six>=1.11'), SpecifierRequirement('six~=1.15'), SpecifierRequirement('six'), SpecifierRequirement('six>=1.5.2'), SpecifierRequirement('six>=1.9.0'), SpecifierRequirement('six>=1.11.0'), SpecifierRequirement('six'), SpecifierRequirement('six>=1.6.1'), SpecifierRequirement('six>=1.9'), SpecifierRequirement('six>=1.5'), SpecifierRequirement('six<2.0'), SpecifierRequirement('six<2.0'), SpecifierRequirement('six'), SpecifierRequirement('six'), SpecifierRequirement('six~=1.15.0'), SpecifierRequirement('six'), SpecifierRequirement('six<2.0,>=1.6.1'), SpecifierRequirement('six'), SpecifierRequirement('six>=1.5.2'), SpecifierRequirement('six>=1.9.0')\r\n```\r\nexample CI failure here:\r\nhttps:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/6200\/workflows\/b64fdec9-f9e6-431c-acd7-e9f2c440c568\/jobs\/38247\r\n\r\nThe main version requirement comes from tensorflow: `six~=1.15.0`\r\nSo I pinned the six version to this.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2432\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2432\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2431","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2431\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2431\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2431\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2431","id":907413691,"node_id":"MDU6SXNzdWU5MDc0MTM2OTE=","number":2431,"title":"DuplicatedKeysError when trying to load adversarial_qa","user":{"login":"hanss0n","id":21348833,"node_id":"MDQ6VXNlcjIxMzQ4ODMz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21348833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hanss0n","html_url":"https:\/\/github.com\/hanss0n","followers_url":"https:\/\/api.github.com\/users\/hanss0n\/followers","following_url":"https:\/\/api.github.com\/users\/hanss0n\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hanss0n\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hanss0n\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hanss0n\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hanss0n\/orgs","repos_url":"https:\/\/api.github.com\/users\/hanss0n\/repos","events_url":"https:\/\/api.github.com\/users\/hanss0n\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hanss0n\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-31T12:11:19Z","updated_at":"2021-06-01T08:54:03Z","closed_at":"2021-06-01T08:52:11Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndataset = load_dataset('adversarial_qa', 'adversarialQA')\r\n```\r\n\r\n## Expected results\r\nThe dataset should be loaded into memory\r\n\r\n## Actual results\r\n\r\n>DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\n>Found duplicate Key: 4d3cb5677211ee32895ca9c66dad04d7152254d4\r\n>Keys should be unique and deterministic in nature\r\n>\r\n>\r\n>During handling of the above exception, another exception occurred:\r\n>\r\n>DuplicatedKeysError Traceback (most recent call last)\r\n>\r\n>\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_writer.py in check_duplicate_keys(self)\r\n> 347 for hash, key in self.hkey_record:\r\n> 348 if hash in tmp_record:\r\n>--> 349 raise DuplicatedKeysError(key)\r\n> 350 else:\r\n> 351 tmp_record.add(hash)\r\n>\r\n>DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\n>Found duplicate Key: 4d3cb5677211ee32895ca9c66dad04d7152254d4\r\n>Keys should be unique and deterministic in nature\r\n\r\n## Environment info\r\n- `datasets` version: 1.7.0\r\n- Platform: Linux-5.4.109+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.10\r\n- PyArrow version: 
3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2431\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2431\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2430","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2430\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2430\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2430\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2430","id":907322595,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MTg3Njkw","number":2430,"title":"Add version-specific BibTeX","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-05-31T10:05:42Z","updated_at":"2021-06-08T07:53:22Z","closed_at":"2021-06-08T07:53:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2430","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2430","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2430.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2430.patch","merged_at":"2021-06-08T07:53:22Z"},"body":"As pointed out by @lhoestq in #2411, after the creation of the Zenodo DOI for Datasets, a new BibTeX entry is created with each release.\r\n\r\nThis PR adds a version-specific BibTeX entry, besides the existing one which is generic for the project.\r\n\r\nSee version-specific BibTeX entry here: https:\/\/zenodo.org\/record\/4817769\/export\/hx#.YLSyd6j7RPY","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2430\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2430\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2429","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2429\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2429\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2429\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2429","id":907321665,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MTg2ODc0","number":2429,"title":"Rename QuestionAnswering template to QuestionAnsweringExtractive","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-31T10:04:42Z","updated_at":"2021-05-31T15:57:26Z","closed_at":"2021-05-31T15:57:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2429","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2429","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2429.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2429.patch","merged_at":"2021-05-31T15:57:24Z"},"body":"Following the discussion with @thomwolf in #2255, this PR renames the QA template to distinguish extractive vs abstractive QA. 
The abstractive template will be added in a future PR.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2429\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2429\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2428","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2428\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2428\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2428\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2428","id":907169746,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MDU2MjI3","number":2428,"title":"Add copyright info for wiki_lingua dataset","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-31T07:22:52Z","updated_at":"2021-06-04T10:22:33Z","closed_at":"2021-06-04T10:22:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2428","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2428","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2428.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2428.patch","merged_at":"2021-06-04T10:22:33Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2428\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2428\/timeline","performed_via_github_app":null} 
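PRs 2429 and 2435 above introduce the `QuestionAnsweringExtractive` task template for SQuAD-like datasets. A minimal usage sketch, assuming a `datasets` version from around these PRs and a dataset whose schema matches `squad` (nested non-squad schemas still trigger the ArrowNotImplementedError from issue 2434), might look like this:

```python
from datasets import load_dataset

# Load a dataset whose schema matches `squad` (flat features, nested `answers` column).
ds = load_dataset("squad", split="validation")

# Cast the columns to the extractive QA template registered for this dataset.
qa_ds = ds.prepare_for_task("question-answering-extractive")
print(qa_ds.features)
```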
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2427","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2427\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2427\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2427\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2427","id":907162923,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MDUwMjAx","number":2427,"title":"Add copyright info to MLSUM dataset","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-31T07:15:57Z","updated_at":"2021-06-04T09:53:50Z","closed_at":"2021-06-04T09:53:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2427","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2427","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2427.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2427.patch","merged_at":"2021-06-04T09:53:49Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2427\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2427\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2426","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2426\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2426\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2426\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2426","id":906473546,"node_id":"MDU6SXNzdWU5MDY0NzM1NDY=","number":2426,"title":"Saving Graph\/Structured Data in 
Datasets","user":{"login":"gsh199449","id":3295342,"node_id":"MDQ6VXNlcjMyOTUzNDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3295342?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gsh199449","html_url":"https:\/\/github.com\/gsh199449","followers_url":"https:\/\/api.github.com\/users\/gsh199449\/followers","following_url":"https:\/\/api.github.com\/users\/gsh199449\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gsh199449\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gsh199449\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gsh199449\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gsh199449\/orgs","repos_url":"https:\/\/api.github.com\/users\/gsh199449\/repos","events_url":"https:\/\/api.github.com\/users\/gsh199449\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gsh199449\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-29T13:35:21Z","updated_at":"2021-06-02T01:21:03Z","closed_at":"2021-06-02T01:21:03Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Thanks for this amazing library! And my question is I have structured data that is organized with a graph. For example, a dataset with users' friendship relations and user's articles. When I try to save a python dict in the dataset, an error occurred ``did not recognize Python value type when inferring an Arrow data type''.\r\nAlthough I also know that storing a python dict in pyarrow datasets is not the best practice, but I have no idea about how to save structured data in the Datasets. \r\n\r\nThank you very much for your help.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2426\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2426\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2425","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2425\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2425\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2425\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2425","id":906385457,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU3NDAwMjM3","number":2425,"title":"Fix Docstring Mistake: dataset vs. 
metric","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-05-29T06:09:53Z","updated_at":"2021-06-01T08:18:04Z","closed_at":"2021-06-01T08:18:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2425","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2425","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2425.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2425.patch","merged_at":"2021-06-01T08:18:04Z"},"body":"PR to fix #2412","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2425\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2425\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2424","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2424\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2424\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2424\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2424","id":906193679,"node_id":"MDU6SXNzdWU5MDYxOTM2Nzk=","number":2424,"title":"load_from_disk and save_to_disk are not compatible with each 
other","user":{"login":"roholazandie","id":7584674,"node_id":"MDQ6VXNlcjc1ODQ2NzQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7584674?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/roholazandie","html_url":"https:\/\/github.com\/roholazandie","followers_url":"https:\/\/api.github.com\/users\/roholazandie\/followers","following_url":"https:\/\/api.github.com\/users\/roholazandie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/roholazandie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/roholazandie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/roholazandie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/roholazandie\/orgs","repos_url":"https:\/\/api.github.com\/users\/roholazandie\/repos","events_url":"https:\/\/api.github.com\/users\/roholazandie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/roholazandie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-28T23:07:10Z","updated_at":"2021-06-08T19:22:32Z","closed_at":"2021-06-08T19:22:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nload_from_disk and save_to_disk are not compatible. When I use save_to_disk to save a dataset to disk it works perfectly but given the same directory load_from_disk throws an error that it can't find state.json. looks like the load_from_disk only works on one split\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"art\")\r\ndataset.save_to_disk(\"mydir\")\r\nd = Dataset.load_from_disk(\"mydir\")\r\n```\r\n\r\n## Expected results\r\nIt is expected that these two functions be the reverse of each other without more manipulation\r\n\r\n## Actual results\r\nFileNotFoundError: [Errno 2] No such file or directory: 'mydir\/art\/state.json'\r\n\r\n## Environment info\r\n- `datasets` version: 1.6.2\r\n- Platform: Linux-5.4.0-73-generic-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.10\r\n- PyTorch version (GPU?): 1.8.1+cu102 (True)\r\n- Tensorflow version (GPU?): not installed (NA)\r\n- Using GPU in script?: \r\n- Using distributed or parallel set-up in script?: \r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2424\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2424\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2423","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2423\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2423\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2423\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2423","id":905935753,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU2OTc5MjA5","number":2423,"title":"add `desc` in `map` for `DatasetDict` 
object","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-28T19:28:44Z","updated_at":"2021-05-31T14:51:23Z","closed_at":"2021-05-31T13:08:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2423","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2423","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2423.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2423.patch","merged_at":"2021-05-31T13:08:04Z"},"body":"`desc` in `map` currently only works with `Dataset` objects. This PR adds support for `DatasetDict` objects as well","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2423\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2423\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2422","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2422\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2422\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2422\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2422","id":905568548,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU2NjM3MzY1","number":2422,"title":"Fix save_to_disk nested features order in 
dataset_info.json","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-28T15:03:28Z","updated_at":"2021-05-28T15:26:57Z","closed_at":"2021-05-28T15:26:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2422","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2422","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2422.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2422.patch","merged_at":"2021-05-28T15:26:56Z"},"body":"Fix issue https:\/\/github.com\/huggingface\/datasets\/issues\/2267\r\n\r\nThe order of the nested features matters (pyarrow limitation), but the save_to_disk method was saving the features types as JSON with `sort_keys=True`, which was breaking the order of the nested features.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2422\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2422\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2421","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2421\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2421\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2421\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2421","id":905549756,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU2NjIwMTM3","number":2421,"title":"doc: fix typo 
HF_MAX_IN_MEMORY_DATASET_SIZE_IN_BYTES","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-28T14:52:10Z","updated_at":"2021-06-04T09:52:45Z","closed_at":"2021-06-04T09:52:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2421","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2421","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2421.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2421.patch","merged_at":"2021-06-04T09:52:45Z"},"body":"MAX_MEMORY_DATASET_SIZE_IN_BYTES should be HF_MAX_MEMORY_DATASET_SIZE_IN_BYTES","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2421\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2421\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2420","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2420\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2420\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2420\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2420","id":904821772,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1OTQ1ODgw","number":2420,"title":"Updated Dataset 
Description","user":{"login":"binny-mathew","id":10741860,"node_id":"MDQ6VXNlcjEwNzQxODYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10741860?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/binny-mathew","html_url":"https:\/\/github.com\/binny-mathew","followers_url":"https:\/\/api.github.com\/users\/binny-mathew\/followers","following_url":"https:\/\/api.github.com\/users\/binny-mathew\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/binny-mathew\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/binny-mathew\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/binny-mathew\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/binny-mathew\/orgs","repos_url":"https:\/\/api.github.com\/users\/binny-mathew\/repos","events_url":"https:\/\/api.github.com\/users\/binny-mathew\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/binny-mathew\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-28T07:10:51Z","updated_at":"2021-06-10T12:11:35Z","closed_at":"2021-06-10T12:11:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2420","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2420","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2420.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2420.patch","merged_at":"2021-06-10T12:11:35Z"},"body":"Added Point of contact information and several other details about the dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2420\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2420\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2419","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2419\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2419\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2419\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2419","id":904347339,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1NTA1OTM1","number":2419,"title":"adds license information for 
DailyDialog.","user":{"login":"aditya2211","id":11574558,"node_id":"MDQ6VXNlcjExNTc0NTU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11574558?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aditya2211","html_url":"https:\/\/github.com\/aditya2211","followers_url":"https:\/\/api.github.com\/users\/aditya2211\/followers","following_url":"https:\/\/api.github.com\/users\/aditya2211\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aditya2211\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aditya2211\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aditya2211\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aditya2211\/orgs","repos_url":"https:\/\/api.github.com\/users\/aditya2211\/repos","events_url":"https:\/\/api.github.com\/users\/aditya2211\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aditya2211\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-27T23:03:42Z","updated_at":"2021-05-31T13:16:52Z","closed_at":"2021-05-31T13:16:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2419","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2419","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2419.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2419.patch","merged_at":"2021-05-31T13:16:52Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2419\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2419\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2418","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2418\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2418\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2418\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2418","id":904051497,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1MjM2OTEz","number":2418,"title":"add utf-8 while reading 
README","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-27T18:12:28Z","updated_at":"2021-06-04T09:55:01Z","closed_at":"2021-06-04T09:55:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2418","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2418","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2418.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2418.patch","merged_at":"2021-06-04T09:55:00Z"},"body":"It was causing tests to fail in Windows (see #2416). In Windows, the default encoding is CP1252 which is unable to decode the character byte 0x9d ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2418\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2418\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2417","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2417\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2417\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2417\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2417","id":903956071,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1MTU3NTI4","number":2417,"title":"Make datasets PEP-561 
compliant","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-27T16:16:17Z","updated_at":"2021-05-28T13:10:10Z","closed_at":"2021-05-28T13:09:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2417","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2417","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2417.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2417.patch","merged_at":"2021-05-28T13:09:16Z"},"body":"Allows to type-check datasets with `mypy` when imported as a third-party library\r\n\r\nPEP-561: https:\/\/www.python.org\/dev\/peps\/pep-0561\r\nMyPy doc on the subject: https:\/\/mypy.readthedocs.io\/en\/stable\/installed_packages.html\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2417\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2417\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2416","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2416\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2416\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2416\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2416","id":903932299,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1MTM3NDUy","number":2416,"title":"Add KLUE 
dataset","user":{"login":"jungwhank","id":53588015,"node_id":"MDQ6VXNlcjUzNTg4MDE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53588015?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jungwhank","html_url":"https:\/\/github.com\/jungwhank","followers_url":"https:\/\/api.github.com\/users\/jungwhank\/followers","following_url":"https:\/\/api.github.com\/users\/jungwhank\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jungwhank\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jungwhank\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jungwhank\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jungwhank\/orgs","repos_url":"https:\/\/api.github.com\/users\/jungwhank\/repos","events_url":"https:\/\/api.github.com\/users\/jungwhank\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jungwhank\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-05-27T15:49:51Z","updated_at":"2021-06-09T15:00:02Z","closed_at":"2021-06-04T17:45:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2416","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2416","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2416.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2416.patch","merged_at":"2021-06-04T17:45:15Z"},"body":"Add `KLUE (Korean Language Understanding Evaluation)` dataset released recently from [paper](https:\/\/arxiv.org\/abs\/2105.09680), [github](https:\/\/github.com\/KLUE-benchmark\/KLUE) and [webpage](https:\/\/klue-benchmark.com\/tasks).\r\nPlease let me know if there's anything missing in the code or README.\r\nThanks!\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2416\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2416\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2415","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2415\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2415\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2415\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2415","id":903923097,"node_id":"MDU6SXNzdWU5MDM5MjMwOTc=","number":2415,"title":"Cached dataset not 
loaded","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-27T15:40:06Z","updated_at":"2021-06-02T13:15:47Z","closed_at":"2021-06-02T13:15:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nI have a large dataset (common_voice, english) where I use several map and filter functions.\r\nSometimes my cached datasets after specific functions are not loaded.\r\nI always use the same arguments, same functions, no seed\u2026\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndef filter_by_duration(batch):\r\n return (\r\n batch[\"duration\"] <= 10\r\n and batch[\"duration\"] >= 1\r\n and len(batch[\"target_text\"]) > 5\r\n )\r\n\r\ndef prepare_dataset(batch):\r\n batch[\"input_values\"] = processor(\r\n batch[\"speech\"], sampling_rate=batch[\"sampling_rate\"][0]\r\n ).input_values\r\n with processor.as_target_processor():\r\n batch[\"labels\"] = processor(batch[\"target_text\"]).input_ids\r\n return batch\r\n\r\ntrain_dataset = train_dataset.filter(\r\n filter_by_duration,\r\n remove_columns=[\"duration\"],\r\n num_proc=data_args.preprocessing_num_workers,\r\n)\r\n\r\n# PROBLEM HERE -> below function is reexecuted and cache is not loaded\r\ntrain_dataset = train_dataset.map(\r\n prepare_dataset,\r\n remove_columns=train_dataset.column_names,\r\n batch_size=training_args.per_device_train_batch_size,\r\n batched=True,\r\n num_proc=data_args.preprocessing_num_workers,\r\n)\r\n\r\n# Later in script\r\nset_caching_enabled(False)\r\n# apply map on trained model to eval\/test sets\r\n\r\n```\r\n\r\n## Expected results\r\nThe cached dataset should always be reloaded.\r\n\r\n## Actual results\r\nThe function is reexecuted.\r\n\r\nI have access to cached files `cache-xxxxx.arrow`.\r\nIs there a way I can somehow load manually 2 versions and see how the hash was created for debug purposes (to know if it's an issue with dataset or function)?\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.2\r\n- Platform: Linux-5.8.0-45-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.5\r\n- PyTorch version (GPU?): 1.8.1+cu102 (True)\r\n- Tensorflow version (GPU?): not installed (NA)\r\n- Using GPU in script?: Yes\r\n- Using distributed or parallel set-up in 
script?: No","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2415\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2415\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2414","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2414\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2414\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2414\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2414","id":903877096,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1MDg5OTIw","number":2414,"title":"Update README.md","user":{"login":"cryoff","id":15029054,"node_id":"MDQ6VXNlcjE1MDI5MDU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15029054?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cryoff","html_url":"https:\/\/github.com\/cryoff","followers_url":"https:\/\/api.github.com\/users\/cryoff\/followers","following_url":"https:\/\/api.github.com\/users\/cryoff\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cryoff\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cryoff\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cryoff\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cryoff\/orgs","repos_url":"https:\/\/api.github.com\/users\/cryoff\/repos","events_url":"https:\/\/api.github.com\/users\/cryoff\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cryoff\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-27T14:53:19Z","updated_at":"2021-06-28T13:46:14Z","closed_at":"2021-06-28T13:04:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2414","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2414","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2414.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2414.patch","merged_at":"2021-06-28T13:04:56Z"},"body":"Provides description of data instances and dataset features\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2414\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2414\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2413","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2413\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2413\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2413\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2413","id":903777557,"node_id":"MDU6SXNzdWU5MDM3Nzc1NTc=","number":2413,"title":"AttributeError: 'DatasetInfo' object 
has no attribute 'task_templates'","user":{"login":"jungwhank","id":53588015,"node_id":"MDQ6VXNlcjUzNTg4MDE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53588015?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jungwhank","html_url":"https:\/\/github.com\/jungwhank","followers_url":"https:\/\/api.github.com\/users\/jungwhank\/followers","following_url":"https:\/\/api.github.com\/users\/jungwhank\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jungwhank\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jungwhank\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jungwhank\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jungwhank\/orgs","repos_url":"https:\/\/api.github.com\/users\/jungwhank\/repos","events_url":"https:\/\/api.github.com\/users\/jungwhank\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jungwhank\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-27T13:44:28Z","updated_at":"2021-06-01T01:05:47Z","closed_at":"2021-06-01T01:05:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nHello, \r\nI'm trying to add dataset and contribute, but test keep fail with below cli.\r\n` RUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_dataset_all_configs_`\r\n\r\n## Steps to reproduce the bug\r\nIt seems like a bug when I see an error with the existing dataset, not the dataset I'm trying to add.\r\n\r\n` RUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_dataset_all_configs_`\r\n\r\n\r\n## Expected results\r\nAll test passed\r\n\r\n## Actual results\r\n```\r\n # check that dataset is not empty\r\n self.parent.assertListEqual(sorted(dataset_builder.info.splits.keys()), sorted(dataset))\r\n for split in dataset_builder.info.splits.keys():\r\n # check that loaded datset is not empty\r\n self.parent.assertTrue(len(dataset[split]) > 0)\r\n \r\n # check that we can cast features for each task template\r\n> task_templates = dataset_builder.info.task_templates\r\nE AttributeError: 'DatasetInfo' object has no attribute 'task_templates'\r\n\r\ntests\/test_dataset_common.py:175: AttributeError\r\n```\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.2\r\n- Platform: Darwin-20.4.0-x86_64-i386-64bit\r\n- Python version: 3.7.7\r\n- PyTorch version (GPU?): 1.7.0 (False)\r\n- Tensorflow version (GPU?): 2.3.0 (False)\r\n- Using GPU in script?: No\r\n- Using distributed or parallel set-up in script?: No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2413\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2413\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2412","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2412\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2412\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2412\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2412","id":903769151,"node_id":"MDU6SXNzdWU5MDM3NjkxNTE=","number":2412,"title":"Docstring mistake: dataset vs. metric","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-27T13:39:11Z","updated_at":"2021-06-01T08:18:04Z","closed_at":"2021-06-01T08:18:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"This:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/d95b95f8cf3cb0cff5f77a675139b584dcfcf719\/src\/datasets\/load.py#L582\r\n\r\nShould better be something like:\r\n\r\n`a metric identifier on HuggingFace AWS bucket (list all available metrics and ids with ``datasets.list_metrics()``)`\r\n\r\nI can provide a PR l8er...","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2412\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2412\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2411","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2411\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2411\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2411\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2411","id":903671778,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU0OTAzNjg2","number":2411,"title":"Add DOI badge to 
README","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-27T12:36:47Z","updated_at":"2021-05-27T13:42:54Z","closed_at":"2021-05-27T13:42:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2411","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2411","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2411.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2411.patch","merged_at":"2021-05-27T13:42:54Z"},"body":"Once published the latest release, the DOI badge has been automatically generated by Zenodo.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2411\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2411\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2410","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2410\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2410\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2410\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2410","id":903613676,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU0ODUwMjY4","number":2410,"title":"fix #2391 add original answers in 
kilt-TriviaQA","user":{"login":"PaulLerner","id":25532159,"node_id":"MDQ6VXNlcjI1NTMyMTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25532159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulLerner","html_url":"https:\/\/github.com\/PaulLerner","followers_url":"https:\/\/api.github.com\/users\/PaulLerner\/followers","following_url":"https:\/\/api.github.com\/users\/PaulLerner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulLerner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulLerner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulLerner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulLerner\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulLerner\/repos","events_url":"https:\/\/api.github.com\/users\/PaulLerner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulLerner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-27T11:54:29Z","updated_at":"2021-06-15T12:35:57Z","closed_at":"2021-06-14T17:29:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2410","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2410","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2410.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2410.patch","merged_at":"2021-06-14T17:29:10Z"},"body":"cc @yjernite is it ok like this?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2410\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2410\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2409","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2409\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2409\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2409\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2409","id":903441398,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU0Njk3NjA0","number":2409,"title":"Add HF_ prefix to env var 
MAX_IN_MEMORY_DATASET_SIZE_IN_BYTES","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":14,"created_at":"2021-05-27T09:07:00Z","updated_at":"2021-06-08T16:00:55Z","closed_at":"2021-05-27T09:33:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2409","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2409","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2409.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2409.patch","merged_at":"2021-05-27T09:33:41Z"},"body":"As mentioned in https:\/\/github.com\/huggingface\/datasets\/pull\/2399 the env var should be prefixed by HF_","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2409\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2409\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2408","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2408\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2408\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2408\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2408","id":903422648,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU0NjgxMzE4","number":2408,"title":"Fix head_qa 
keys","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-27T08:50:19Z","updated_at":"2021-05-27T09:05:37Z","closed_at":"2021-05-27T09:05:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2408","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2408","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2408.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2408.patch","merged_at":"2021-05-27T09:05:36Z"},"body":"There were duplicate in the keys, as mentioned in #2382 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2408\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2408\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2407","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2407\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2407\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2407\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2407","id":903111755,"node_id":"MDU6SXNzdWU5MDMxMTE3NTU=","number":2407,"title":".map() function got an unexpected keyword argument 
'cache_file_name'","user":{"login":"cindyxinyiwang","id":7390482,"node_id":"MDQ6VXNlcjczOTA0ODI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7390482?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cindyxinyiwang","html_url":"https:\/\/github.com\/cindyxinyiwang","followers_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/followers","following_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/repos","events_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-27T01:54:26Z","updated_at":"2021-05-27T13:46:40Z","closed_at":"2021-05-27T13:46:40Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nI'm trying to save the result of datasets.map() to a specific file, so that I can easily share it among multiple computers without reprocessing the dataset. However, when I try to pass an argument 'cache_file_name' to the .map() function, it throws an error that \".map() function got an unexpected keyword argument 'cache_file_name'\". \r\n\r\nI believe I'm using the latest dataset 1.6.2. 
Also seems like the document and the actual code indicates there is an argument 'cache_file_name' for the .map() function.\r\n\r\nHere is the code I use\r\n## Steps to reproduce the bug\r\n```datasets = load_from_disk(dataset_path=my_path)\r\n\r\n[...]\r\n\r\ndef tokenize_function(examples):\r\n return tokenizer(examples[text_column_name])\r\n\r\nlogger.info(\"Mapping dataset to tokenized dataset.\")\r\ntokenized_datasets = datasets.map(\r\n tokenize_function,\r\n batched=True,\r\n num_proc=preprocessing_num_workers,\r\n remove_columns=column_names,\r\n load_from_cache_file=True,\r\n cache_file_name=\"my_tokenized_file\"\r\n)\r\n```\r\n\r\n## Actual results\r\n tokenized_datasets = datasets.map(\r\nTypeError: map() got an unexpected keyword argument 'cache_file_name'\r\n\r\n## Environment info\r\n\r\n- `datasets` version:1.6.2\r\n- Platform:Linux-4.18.0-193.28.1.el8_2.x86_64-x86_64-with-glibc2.10\r\n- Python version:3.8.5\r\n- PyArrow version:3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2407\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2407\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2406","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2406\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2406\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2406\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2406","id":902643844,"node_id":"MDU6SXNzdWU5MDI2NDM4NDQ=","number":2406,"title":"Add guide on using task templates to documentation","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-26T16:28:26Z","updated_at":"2021-05-26T16:28:26Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Once we have a stable API on the text classification and question answering task templates, add a guide on how to use them in the documentation.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2406\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2406\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2405","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2405\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2405\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2405\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2405","id":901227658,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUyNzA2OTk1","number":2405,"title":"Add dataset 
tags","user":{"login":"OyvindTafjord","id":6453366,"node_id":"MDQ6VXNlcjY0NTMzNjY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6453366?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/OyvindTafjord","html_url":"https:\/\/github.com\/OyvindTafjord","followers_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/followers","following_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/orgs","repos_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/repos","events_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-25T18:57:29Z","updated_at":"2021-05-26T16:54:16Z","closed_at":"2021-05-26T16:40:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2405","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2405","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2405.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2405.patch","merged_at":"2021-05-26T16:40:07Z"},"body":"The dataset tags were provided by Peter Clark following the guide.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2405\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2405\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2404","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2404\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2404\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2404\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2404","id":901179832,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUyNjYzOTcz","number":2404,"title":"Paperswithcode dataset 
mapping","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-25T18:14:26Z","updated_at":"2021-05-26T11:21:25Z","closed_at":"2021-05-26T11:17:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2404","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2404","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2404.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2404.patch","merged_at":"2021-05-26T11:17:18Z"},"body":"This is a continuation of https:\/\/github.com\/huggingface\/huggingface_hub\/pull\/43, encoded directly inside dataset cards.\r\n\r\nAs discussed:\r\n- `paperswithcode_id: null` when the dataset doesn't exist on paperswithcode's side.\r\n- I've added this new key at the end of the yaml instead of ordering all keys alphabetically as pyyaml's default. 
No strong opinion on that one though\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2404\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2404\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2403","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2403\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2403\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2403\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2403","id":900059014,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUxNjcxMTMw","number":2403,"title":"Free datasets with cache file in temp dir on exit","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-24T22:15:11Z","updated_at":"2021-05-26T17:25:19Z","closed_at":"2021-05-26T16:39:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2403","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2403","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2403.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2403.patch","merged_at":"2021-05-26T16:39:29Z"},"body":"This PR properly cleans up the memory-mapped tables that reference the cache files inside the temp dir.\r\nSince the built-in `_finalizer` of `TemporaryDirectory` can't be modified, this PR defines its own `TemporaryDirectory` class that accepts a custom clean-up function.\r\n\r\nFixes #2402","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2403\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2403\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2402","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2402\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2402\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2402\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2402","id":900025329,"node_id":"MDU6SXNzdWU5MDAwMjUzMjk=","number":2402,"title":"PermissionError on Windows when using temp dir for caching","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-24T21:22:59Z","updated_at":"2021-05-26T16:39:29Z","closed_at":"2021-05-26T16:39:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, the following code raises a PermissionError on master if working on Windows:\r\n\r\n```python\r\n# run as a script or call exit() in REPL to initiate the temp dir cleanup\r\nfrom datasets import *\r\nd = load_dataset(\"sst\", split=\"train\", keep_in_memory=False)\r\nset_caching_enabled(False)\r\nd.map(lambda ex: ex)\r\n```\r\n\r\nError stack trace:\r\n```\r\nTraceback (most recent call last):\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\weakref.py\", line 624, in _exitfunc\r\n f()\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\weakref.py\", line 548, in __call__\r\n return info.func(*info.args, **(info.kwargs or {}))\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\tempfile.py\", line 799, in _cleanup\r\n _shutil.rmtree(name)\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\shutil.py\", line 500, in rmtree\r\n return _rmtree_unsafe(path, onerror)\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\shutil.py\", line 395, in _rmtree_unsafe\r\n onerror(os.unlink, fullname, sys.exc_info())\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\shutil.py\", line 393, in _rmtree_unsafe\r\n os.unlink(fullname)\r\nPermissionError: [WinError 5] Access is denied: 
'C:\\\\Users\\\\Mario\\\\AppData\\\\Local\\\\Temp\\\\tmp20epyhmq\\\\cache-87a87ffb5a956e68.arrow'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2402\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2402\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2401","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2401\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2401\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2401\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2401","id":899910521,"node_id":"MDU6SXNzdWU4OTk5MTA1MjE=","number":2401,"title":"load_dataset('natural_questions') fails with \"ValueError: External features info don't match the dataset\"","user":{"login":"jonrbates","id":15602718,"node_id":"MDQ6VXNlcjE1NjAyNzE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15602718?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonrbates","html_url":"https:\/\/github.com\/jonrbates","followers_url":"https:\/\/api.github.com\/users\/jonrbates\/followers","following_url":"https:\/\/api.github.com\/users\/jonrbates\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonrbates\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonrbates\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonrbates\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonrbates\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonrbates\/repos","events_url":"https:\/\/api.github.com\/users\/jonrbates\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonrbates\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-05-24T18:38:53Z","updated_at":"2021-06-09T09:07:25Z","closed_at":"2021-06-09T09:07:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nload_dataset('natural_questions') throws ValueError\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndatasets = load_dataset('natural_questions', split='validation[:10]')\r\n```\r\n\r\n## Expected results\r\nCall to load_dataset returns data.\r\n\r\n## Actual results\r\n```\r\nUsing custom data configuration default\r\nReusing dataset natural_questions (\/mnt\/d\/huggingface\/datasets\/natural_questions\/default\/0.0.2\/19bc04755018a3ad02ee74f7045cde4ba9b4162cb64450a87030ab786b123b76)\r\n---------------------------------------------------------------------------\r\nValueError Traceback (most recent call last)\r\n in \r\n----> 1 datasets = load_dataset('natural_questions', split='validation[:10]', cache_dir='\/mnt\/d\/huggingface\/datasets')\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, **config_kwargs)\r\n 756 keep_in_memory if keep_in_memory is not None else is_small_dataset(builder_instance.info.dataset_size)\r\n 757 )\r\n--> 758 ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications, 
in_memory=keep_in_memory)\r\n 759 if save_infos:\r\n 760 builder_instance._save_infos()\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/builder.py in as_dataset(self, split, run_post_process, ignore_verifications, in_memory)\r\n 735 \r\n 736 # Create a dataset for each of the given splits\r\n--> 737 datasets = utils.map_nested(\r\n 738 partial(\r\n 739 self._build_single_dataset,\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py in map_nested(function, data_struct, dict_only, map_list, map_tuple, map_numpy, num_proc, types)\r\n 193 # Singleton\r\n 194 if not isinstance(data_struct, dict) and not isinstance(data_struct, types):\r\n--> 195 return function(data_struct)\r\n 196 \r\n 197 disable_tqdm = bool(logger.getEffectiveLevel() > INFO)\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/builder.py in _build_single_dataset(self, split, run_post_process, ignore_verifications, in_memory)\r\n 762 \r\n 763 # Build base dataset\r\n--> 764 ds = self._as_dataset(\r\n 765 split=split,\r\n 766 in_memory=in_memory,\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/builder.py in _as_dataset(self, split, in_memory)\r\n 838 in_memory=in_memory,\r\n 839 )\r\n--> 840 return Dataset(**dataset_kwargs)\r\n 841 \r\n 842 def _post_process(self, dataset: Dataset, resources_paths: Dict[str, str]) -> Optional[Dataset]:\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py in __init__(self, arrow_table, info, split, indices_table, fingerprint)\r\n 271 assert self._fingerprint is not None, \"Fingerprint can't be None in a Dataset object\"\r\n 272 if self.info.features.type != inferred_features.type:\r\n--> 273 raise ValueError(\r\n 274 \"External features info don't match the dataset:\\nGot\\n{}\\nwith type\\n{}\\n\\nbut expected something like\\n{}\\nwith type\\n{}\".format(\r\n 275 self.info.features, self.info.features.type, inferred_features, inferred_features.type\r\n\r\nValueError: External features info don't match the dataset:\r\nGot\r\n{'id': Value(dtype='string', id=None), 'document': {'title': Value(dtype='string', id=None), 'url': Value(dtype='string', id=None), 'html': Value(dtype='string', id=None), 'tokens': Sequence(feature={'token': Value(dtype='string', id=None), 'is_html': Value(dtype='bool', id=None)}, length=-1, id=None)}, 'question': {'text': Value(dtype='string', id=None), 'tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}, 'annotations': Sequence(feature={'id': Value(dtype='string', id=None), 'long_answer': {'start_token': Value(dtype='int64', id=None), 'end_token': Value(dtype='int64', id=None), 'start_byte': Value(dtype='int64', id=None), 'end_byte': Value(dtype='int64', id=None)}, 'short_answers': Sequence(feature={'start_token': Value(dtype='int64', id=None), 'end_token': Value(dtype='int64', id=None), 'start_byte': Value(dtype='int64', id=None), 'end_byte': Value(dtype='int64', id=None), 'text': Value(dtype='string', id=None)}, length=-1, id=None), 'yes_no_answer': ClassLabel(num_classes=2, names=['NO', 'YES'], names_file=None, id=None)}, length=-1, id=None)}\r\nwith type\r\nstruct, long_answer: list>, short_answers: list, end_token: list, start_byte: list, start_token: list, text: list>>, yes_no_answer: list>, document: struct, token: list>>, id: string, question: struct>>\r\n\r\nbut expected something like\r\n{'id': Value(dtype='string', id=None), 'document': {'html': Value(dtype='string', id=None), 'title': Value(dtype='string', id=None), 'tokens': 
{'is_html': Sequence(feature=Value(dtype='bool', id=None), length=-1, id=None), 'token': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}, 'url': Value(dtype='string', id=None)}, 'question': {'text': Value(dtype='string', id=None), 'tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}, 'annotations': {'id': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'long_answer': [{'end_byte': Value(dtype='int64', id=None), 'end_token': Value(dtype='int64', id=None), 'start_byte': Value(dtype='int64', id=None), 'start_token': Value(dtype='int64', id=None)}], 'short_answers': [{'end_byte': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'end_token': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'start_byte': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'start_token': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'text': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}], 'yes_no_answer': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None)}}\r\nwith type\r\nstruct, long_answer: list>, short_answers: list, end_token: list, start_byte: list, start_token: list, text: list>>, yes_no_answer: list>, document: struct, token: list>, url: string>, id: string, question: struct>>\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.2\r\n- Platform: Linux-5.4.72-microsoft-standard-WSL2-x86_64-with-glibc2.10\r\n- Python version: 3.8.3\r\n- PyTorch version (GPU?): 1.6.0 (False)\r\n- Tensorflow version (GPU?): not installed (NA)\r\n- Using GPU in script?: No\r\n- Using distributed or parallel set-up in script?: No\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2401\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2401\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2400","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2400\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2400\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2400\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2400","id":899867212,"node_id":"MDU6SXNzdWU4OTk4NjcyMTI=","number":2400,"title":"Concatenate several datasets with removed columns is not 
working.","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-24T17:40:15Z","updated_at":"2021-05-25T05:52:01Z","closed_at":"2021-05-25T05:51:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nYou can't concatenate datasets when you removed columns before.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset, concatenate_datasets\r\n\r\nwikiann= load_dataset(\"wikiann\",\"en\")\r\n\r\nwikiann[\"train\"] = wikiann[\"train\"].remove_columns([\"langs\",\"spans\"])\r\nwikiann[\"test\"] = wikiann[\"test\"].remove_columns([\"langs\",\"spans\"])\r\n\r\nassert wikiann[\"train\"].features.type == wikiann[\"test\"].features.type\r\n\r\nconcate = concatenate_datasets([wikiann[\"train\"],wikiann[\"test\"]])\r\n```\r\n\r\n## Expected results\r\nMerged dataset \r\n\r\n\r\n## Actual results\r\n```python\r\nValueError: External features info don't match the dataset:\r\nGot\r\n{'tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'ner_tags': Sequence(feature=ClassLabel(num_classes=7, names=['O', 'B-PER', 'I-PER', 'B-ORG', 'I-ORG', 'B-LOC', 'I-LOC'], names_file=None, id=None), length=-1, id=None), 'langs': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'spans': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}\r\nwith type\r\nstruct, ner_tags: list, spans: list, tokens: list>\r\n\r\nbut expected something like\r\n{'ner_tags': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}\r\nwith type\r\nstruct, tokens: list>\r\n```\r\n## Environment info\r\n\r\n- `datasets` version: ~1.6.2~ 1.5.0\r\n- Platform: macos\r\n- Python version: 3.8.5\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2400\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2400\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2399","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2399\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2399\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2399\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2399","id":899853610,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUxNDk0OTc2","number":2399,"title":"Add env variable for MAX_IN_MEMORY_DATASET_SIZE_IN_BYTES","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-24T17:19:15Z","updated_at":"2021-05-27T09:07:15Z","closed_at":"2021-05-26T16:07:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2399","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2399","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2399.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2399.patch","merged_at":"2021-05-26T16:07:54Z"},"body":"Add env variable for `MAX_IN_MEMORY_DATASET_SIZE_IN_BYTES`.\r\n\r\nThis will allow to turn off default behavior: loading in memory (and not caching) small datasets.\r\n\r\nFix #2387.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2399\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2399\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2398","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2398\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2398\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2398\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2398","id":899511837,"node_id":"MDU6SXNzdWU4OTk1MTE4Mzc=","number":2398,"title":"News_commentary Dataset Translation Pairs are of Incorrect Language Specified 
Pairs","user":{"login":"anassalamah","id":8571003,"node_id":"MDQ6VXNlcjg1NzEwMDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8571003?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anassalamah","html_url":"https:\/\/github.com\/anassalamah","followers_url":"https:\/\/api.github.com\/users\/anassalamah\/followers","following_url":"https:\/\/api.github.com\/users\/anassalamah\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anassalamah\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anassalamah\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anassalamah\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anassalamah\/orgs","repos_url":"https:\/\/api.github.com\/users\/anassalamah\/repos","events_url":"https:\/\/api.github.com\/users\/anassalamah\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anassalamah\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-24T10:03:34Z","updated_at":"2021-05-24T10:03:34Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I used load_dataset to load the news_commentary dataset for \"ar-en\" translation pairs but found translations from Arabic to Hindi. \r\n\r\n```\r\ntrain_ds = load_dataset(\"news_commentary\", \"ar-en\", split='train[:98%]')\r\nval_ds = load_dataset(\"news_commentary\", \"ar-en\", split='train[98%:]')\r\n\r\n# filtering out examples that are not ar-en translations but ar-hi\r\nval_ds = val_ds.filter(lambda example, indice: indice not in chain(range(1312,1327) ,range(1384,1399), range(1030,1042)), with_indices=True)\r\n```\r\n\r\n* I'm fairly new to using datasets so I might be doing something wrong","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2398\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2398\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2397","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2397\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2397\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2397\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2397","id":899427378,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUxMTMxMTY0","number":2397,"title":"Fix number of classes in indic_glue sna.bn 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-24T08:18:55Z","updated_at":"2021-05-25T16:32:16Z","closed_at":"2021-05-25T16:32:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2397","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2397","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2397.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2397.patch","merged_at":"2021-05-25T16:32:16Z"},"body":"As read in the [paper](https:\/\/www.aclweb.org\/anthology\/2020.findings-emnlp.445.pdf), Table 11.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2397\/reactions","total_count":1,"+1":0,"-1":0,"laugh":1,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2397\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2396","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2396\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2396\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2396\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2396","id":899016308,"node_id":"MDU6SXNzdWU4OTkwMTYzMDg=","number":2396,"title":"strange datasets from OSCAR 
corpus","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-23T13:06:02Z","updated_at":"2021-06-17T13:54:37Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"![image](https:\/\/user-images.githubusercontent.com\/50871412\/119260850-4f876b80-bc07-11eb-8894-124302600643.png)\r\n![image](https:\/\/user-images.githubusercontent.com\/50871412\/119260875-675eef80-bc07-11eb-9da4-ee27567054ac.png)\r\nFrom the [official site ](https:\/\/oscar-corpus.com\/), the Yue Chinese dataset should have 2.2KB data.\r\n7 training instances is obviously not a right number.\r\nAs I can read Yue Chinese, I call tell the last instance is definitely not something that would appear on Common Crawl.\r\nAnd even if you don't read Yue Chinese, you can tell the first six instance are problematic.\r\n(It is embarrassing, as the 7 training instances look exactly like something from a pornographic novel or flitting messages in a chat of a dating app)\r\nIt might not be the problem of the huggingface\/datasets implementation, because when I tried to download the dataset from the official site, I found out that the zip file is corrupted.\r\nI will try to inform the host of OSCAR corpus later.\r\nAwy a remake about this dataset in huggingface\/datasets is needed, perhaps after the host of the dataset fixes the issue.\r\n\r\n> Hi @jerryIsHere , sorry for the late response! Sadly this is normal, the problem comes form fasttext's classifier which we used to create the original corpus. In general the classifier is not really capable of properly recognizing Yue Chineese so the file ends un being just noise from Common Crawl. Some of these problems with OSCAR were already discussed [here](https:\/\/arxiv.org\/pdf\/2103.12028.pdf) but we are working on explicitly documenting the problems by language on our website. 
In fact, could please you open an issue on [our repo](https:\/\/github.com\/oscar-corpus\/oscar-website\/issues) as well so that we can track it?\r\n\r\nThanks a lot, the new post is here:\r\nhttps:\/\/github.com\/oscar-corpus\/oscar-website\/issues\/11","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2396\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2396\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2395","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2395\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2395\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2395\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2395","id":898762730,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUwNTk3NjI0","number":2395,"title":"`pretty_name` for dataset in YAML tags","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":18,"created_at":"2021-05-22T09:24:45Z","updated_at":"2021-06-24T14:14:11Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2395","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2395","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2395.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2395.patch","merged_at":null},"body":"I'm updating `pretty_name` for datasets in YAML tags as discussed with @lhoestq. Here are the first 10, please let me know if they're looking good.\r\n\r\nIf dataset has 1 config, I've added `pretty_name` as `config_name: full_name_of_dataset` as config names were `plain_text`, `default`, `squad` etc (not so important in this case) whereas when dataset has >1 configs, I've added `config_name: full_name_of_dataset+config_name` so as to let user know about the `config` here. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2395\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2395\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2392","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2392\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2392\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2392\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2392","id":898156795,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUwMDYxOTE3","number":2392,"title":"Update text classification template labels in DatasetInfo __post_init__","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-21T15:29:41Z","updated_at":"2021-05-28T11:37:35Z","closed_at":"2021-05-28T11:37:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2392","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2392","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2392.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2392.patch","merged_at":"2021-05-28T11:37:32Z"},"body":"This PR implements the idea discussed in #2389 to update the `labels` of the `TextClassification` template in the `DatasetInfo.__post_init__`. 
The main reason for doing so is so avoid duplicating the label definitions in both `DatasetInfo.features` and `DatasetInfo.task_templates`.\r\n\r\nTo avoid storing state in `DatasetInfo.__post_init__`, the current implementation flushes `DatasetInfo.task_templates` before the features are cast in `Dataset.prepare_for_task` (thanks to @mariosasko for this idea!).\r\n\r\nHere is an example of the current workflow:\r\n\r\n```python\r\nds1 = load_dataset(\".\/datasets\/emotion\/\")\r\n# cast features and flush templates\r\nds2 = ds1.prepare_for_task(\"text-classification\")\r\nassert ds2.info.task_templates is None\r\n```\r\n\r\nNote that if users want to pass a `TextClassification` template to `prepare_for_task`, we require them to set `TextClassification.labels` to match the dataset's features corresponding to `label_column`:\r\n\r\n```python\r\nds1 = load_dataset(\".\/datasets\/emotion\/\")\r\n# TextClassification.labels is None by default => invalid template\r\ntask = TextClassification(text_column=\"text\", label_column=\"label\")\r\n# Raises ValueError\r\nds1.prepare_for_task(task)\r\n# Specifying the labels => valid template\r\ntask = TextClassification(text_column=\"text\", label_column=\"label\", labels=['anger', 'fear', 'joy', 'love', 'sadness', 'surprise'])\r\nds1.prepare_for_task(task)\r\n```\r\n\r\nThis PR also adds:\r\n\r\n* New tests + fixed some old tests that weren't testing `assertRaises` properly\r\n* A decorator to share docstrings across common functions. This allows us to document `DatasetDict.prepare_for_task` and `Dataset.prepare_for_task` in one place.\r\n* Fixes to avoid side-effects from in-place replacements of `DatasetInfo.task_templates` in `DatasetInfo.__post_init__`. Thanks to @lhoestq for figuring this out!\r\n* Removal of `FeaturesWithLazyClassLabel` since we now create a new instance of `TextClassification` in `DatasetInfo.__post_init__` and avoid the side-effects first pointed out by @mariosasko \r\n\r\n### PR Description from original WIP \r\n\r\nHi @yjernite and @lhoestq, here's a first stab at the suggestion discussed in #2389 to update the `labels` of the `TextClassification` template in the `DatasetInfo.__post_init__`.\r\n\r\nOne problem I've spotted is that my current implementation introduces state into the `__post_init__`: \r\n\r\n* When we call `load_dataset`, `DatasetInfo.features` are the \"raw\" features without any casting so we can access the column names by the `label_column` specified in `TextClassification`\r\n* When we call `Dataset.prepare_for_task` we run into a problem because the `DatasetInfo.features` are first cast into the new schema which triggers a `KeyError` when we update the infos [here](https:\/\/github.com\/huggingface\/datasets\/blob\/8b2a78520828e0cc13c14a31f413a5395ef25110\/src\/datasets\/arrow_dataset.py#L1959).\r\n\r\nHere's an explicit example of what I mean with the stack trace appended below:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n# this works \r\nds = load_dataset(\"emotion\")\r\n# we can verify the task template is correctly set\r\nds[\"train\"].info.task_templates # returns [TextClassification(labels=('sadness', 'joy', 'love', 'anger', 'fear', 'surprise'), text_column='text', label_column='label')]\r\n# but this fails because the _post_init__ is looking for the original column names\r\nds.prepare_for_task(\"text-classification\")\r\n```\r\n```\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n in \r\n----> 1 
ds.prepare_for_task(\"text-classification\")\r\n\r\n~\/git\/datasets\/src\/datasets\/dataset_dict.py in prepare_for_task(self, task)\r\n 807 \"\"\"\r\n 808 self._check_values_type()\r\n--> 809 return DatasetDict({k: dataset.prepare_for_task(task=task) for k, dataset in self.items()})\r\n\r\n~\/git\/datasets\/src\/datasets\/dataset_dict.py in (.0)\r\n 807 \"\"\"\r\n 808 self._check_values_type()\r\n--> 809 return DatasetDict({k: dataset.prepare_for_task(task=task) for k, dataset in self.items()})\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in prepare_for_task(self, task)\r\n 1421 dataset = self.remove_columns(columns_to_drop)\r\n 1422 dataset = dataset.rename_columns(column_mapping)\r\n-> 1423 dataset = dataset.cast(features=template.features)\r\n 1424 return dataset\r\n 1425 \r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in cast(self, features, batch_size, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, num_proc)\r\n 970 format = self.format\r\n 971 dataset = self.with_format(\"arrow\")\r\n--> 972 dataset = dataset.map(\r\n 973 lambda t: t.cast(schema),\r\n 974 batched=True,\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint)\r\n 1583 \r\n 1584 if num_proc is None or num_proc == 1:\r\n-> 1585 return self._map_single(\r\n 1586 function=function,\r\n 1587 with_indices=with_indices,\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 173 }\r\n 174 # apply actual function\r\n--> 175 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 176 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 177 # re-apply format to the output\r\n\r\n~\/git\/datasets\/src\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 338 # Call actual function\r\n 339 \r\n--> 340 out = func(self, *args, **kwargs)\r\n 341 \r\n 342 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset)\r\n 1959 if update_data:\r\n 1960 # Create new Dataset from buffer or file\r\n-> 1961 info = self.info.copy()\r\n 1962 info.features = writer._features\r\n 1963 if buf_writer is None:\r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in copy(self)\r\n 274 \r\n 275 def copy(self) -> \"DatasetInfo\":\r\n--> 276 return self.__class__(**{k: copy.deepcopy(v) for k, v in self.__dict__.items()})\r\n 277 \r\n 278 \r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in __init__(self, description, citation, homepage, license, features, post_processed, supervised_keys, task_templates, builder_name, config_name, version, splits, download_checksums, download_size, post_processing_size, dataset_size, size_in_bytes)\r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in __post_init__(self)\r\n 174 # The reason is that Dataset.prepare_for_task calls Dataset.cast which converts the\r\n 175 # DatasetInfo.features to the new schema and thus template.label_column is no longer a valid key\r\n--> 176 
object.__setattr__(template, \"labels\", tuple(self.features[template.label_column].names))\r\n 177 template.label_schema[\"labels\"] = ClassLabel(names=template.labels)\r\n 178 self.task_templates[idx] = template\r\n\r\nKeyError: 'label'\r\n```\r\n\r\nWhat do you think? I did this a bit quickly, so maybe I'm overlooking something obvious :) One thing would be to only update the labels of the task template on load, but this seems a bit hacky IMO","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2392\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2392\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2391","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2391\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2391\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2391\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2391","id":898128099,"node_id":"MDU6SXNzdWU4OTgxMjgwOTk=","number":2391,"title":"Missing original answers in kilt-TriviaQA","user":{"login":"PaulLerner","id":25532159,"node_id":"MDQ6VXNlcjI1NTMyMTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25532159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulLerner","html_url":"https:\/\/github.com\/PaulLerner","followers_url":"https:\/\/api.github.com\/users\/PaulLerner\/followers","following_url":"https:\/\/api.github.com\/users\/PaulLerner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulLerner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulLerner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulLerner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulLerner\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulLerner\/repos","events_url":"https:\/\/api.github.com\/users\/PaulLerner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulLerner\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-21T14:57:07Z","updated_at":"2021-06-14T17:29:11Z","closed_at":"2021-06-14T17:29:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I previously opened an issue at https:\/\/github.com\/facebookresearch\/KILT\/issues\/42 but from the answer of @fabiopetroni it seems that the problem comes from HF-datasets\r\n\r\n## Describe the bug\r\nThe `answer` field in kilt-TriviaQA, e.g. `kilt_tasks['train_triviaqa'][0]['output']['answer']` contains a list of alternative answer which are accepted for the question. \r\nHowever it'd be nice to know the original answer to the question (the only fields in `output` are `'answer', 'meta', 'provenance'`)\r\n\r\n## How to fix\r\nIt can be fixed by retrieving the original answer from the original TriviaQA (e.g. 
`trivia_qa['train'][0]['answer']['value']`), perhaps at the same place as here where one retrieves the questions https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/kilt_tasks\/README.md#loading-the-kilt-knowledge-source-and-task-data\r\n\r\ncc @yjernite who previously answered to an issue about KILT and TriviaQA :)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2391\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2391\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2390","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2390\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2390\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2390\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2390","id":897903642,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ5ODQ0NjQ2","number":2390,"title":"Add check for task templates on dataset load","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-21T10:16:57Z","updated_at":"2021-05-21T15:49:09Z","closed_at":"2021-05-21T15:49:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2390","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2390","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2390.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2390.patch","merged_at":"2021-05-21T15:49:06Z"},"body":"This PR adds a check that the features of a dataset match the schema of each compatible task template.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2390\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2390\/timeline","performed_via_github_app":null} 
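The check described in PR #2390 above ("the features of a dataset match the schema of each compatible task template") can be illustrated with a self-contained sketch. This is not the library's implementation; the class and function names below are made up for the example.

```python
from dataclasses import dataclass
from typing import Dict

# Self-contained illustration: every column a task template relies on must be
# present in the dataset's features, otherwise loading should fail loudly.
@dataclass
class SimpleTextClassificationTemplate:
    text_column: str = "text"
    label_column: str = "label"

def check_template_compatibility(
    features: Dict[str, str], template: SimpleTextClassificationTemplate
) -> None:
    for column in (template.text_column, template.label_column):
        if column not in features:
            raise ValueError(
                f"Column '{column}' required by the task template is missing "
                f"from the dataset features: {sorted(features)}"
            )

# A features dict with 'text' and 'label' passes; one without 'label' would raise.
check_template_compatibility(
    {"text": "string", "label": "ClassLabel"}, SimpleTextClassificationTemplate()
)
```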
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2389","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2389\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2389\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2389\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2389","id":897822270,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ5Nzc3MDMz","number":2389,"title":"Insert task templates for text classification","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-21T08:36:26Z","updated_at":"2021-05-28T15:28:58Z","closed_at":"2021-05-28T15:26:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2389","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2389","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2389.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2389.patch","merged_at":"2021-05-28T15:26:28Z"},"body":"This PR inserts text-classification templates for datasets with the following properties:\r\n\r\n* Only one config\r\n* At most two features of `(Value, ClassLabel)` type\r\n\r\nNote that this misses datasets like `sentiment140` which only has `Value` type features - these will be handled in a separate PR","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2389\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2389\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2388","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2388\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2388\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2388\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2388","id":897767470,"node_id":"MDU6SXNzdWU4OTc3Njc0NzA=","number":2388,"title":"Incorrect URLs for some 
datasets","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-21T07:22:35Z","updated_at":"2021-06-04T17:39:45Z","closed_at":"2021-06-04T17:39:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIt seems that the URLs for the following datasets are invalid: \r\n\r\n- [ ] `bn_hate_speech` has been renamed: 
https:\/\/github.com\/rezacsedu\/Bengali-Hate-Speech-Dataset\/commit\/c67ecfc4184911e12814f6b36901f9828df8a63a\r\n- [ ] `covid_tweets_japanese` has been renamed: http:\/\/www.db.info.gifu-u.ac.jp\/covid-19-twitter-dataset\/\r\n\r\nAs a result we can no longer load these datasets using `load_dataset`. The simple fix is to rename the URL in the dataset script - will do this asap.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n# pick one of the datasets from the list above\r\nds = load_dataset(\"bn_hate_speech\")\r\n```\r\n\r\n## Expected results\r\nDataset loads without error.\r\n\r\n## Actual results\r\n```\r\nDownloading: 3.36kB [00:00, 1.07MB\/s] \r\nDownloading: 2.03kB [00:00, 678kB\/s] \r\nUsing custom data configuration default\r\nDownloading and preparing dataset bn_hate_speech\/default (download: 951.48 KiB, generated: 949.84 KiB, post-processed: Unknown size, total: 1.86 MiB) to \/Users\/lewtun\/.cache\/huggingface\/datasets\/bn_hate_speech\/default\/0.0.0\/a2dc726e511a2177523301bcad196af05d4d8a2cff30d2769ba8aacc1f5fdb5c...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 744, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 574, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 630, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/Users\/lewtun\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/bn_hate_speech\/a2dc726e511a2177523301bcad196af05d4d8a2cff30d2769ba8aacc1f5fdb5c\/bn_hate_speech.py\", line 76, in _split_generators\r\n train_path = dl_manager.download_and_extract(_URL)\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 287, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 195, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 195, in map_nested\r\n return function(data_struct)\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 218, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 281, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 621, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/rezacsedu\/Bengali-Hate-Speech-Dataset\/main\/Bengali_%20Hate_Speech_Dataset_Subset.csv\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.2.dev0\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 
3.8.8\r\n- PyArrow version: 3.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2388\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2388\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2387","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2387\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2387\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2387\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2387","id":897566666,"node_id":"MDU6SXNzdWU4OTc1NjY2NjY=","number":2387,"title":"datasets 1.6 ignores cache","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":13,"created_at":"2021-05-21T00:12:58Z","updated_at":"2021-05-26T16:07:54Z","closed_at":"2021-05-26T16:07:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Moving from https:\/\/github.com\/huggingface\/transformers\/issues\/11801#issuecomment-845546612 \r\n\r\nQuoting @VictorSanh:\r\n\r\n> \r\n> I downgraded datasets to `1.5.0` and printed `tokenized_datasets.cache_files` (L335):\r\n> \r\n> > `{'train': [{'filename': '\/home\/victor\/.cache\/huggingface\/datasets\/openwebtext10k\/plain_text\/1.0.0\/3a8df094c671b4cb63ed0b41f40fb3bd855e9ce2e3765e5df50abcdfb5ec144b\/cache-c6aefe81ca4e5152.arrow'}], 'validation': [{'filename': '\/home\/victor\/.cache\/huggingface\/datasets\/openwebtext10k\/plain_text\/1.0.0\/3a8df094c671b4cb63ed0b41f40fb3bd855e9ce2e3765e5df50abcdfb5ec144b\/cache-97cf4c813e6469c6.arrow'}]}`\r\n> \r\n> while the same command with the latest version of datasets (actually starting at `1.6.0`) gives:\r\n> > `{'train': [], 'validation': []}`\r\n> \r\n\r\nI also confirm that downgrading to `datasets==1.5.0` makes things fast again - i.e. 
cache is used.\r\n\r\nto reproduce:\r\n```\r\nUSE_TF=0 python examples\/pytorch\/language-modeling\/run_clm.py \\\r\n --model_name_or_path gpt2 \\\r\n --dataset_name \"stas\/openwebtext-10k\" \\\r\n --output_dir output_dir \\\r\n --overwrite_output_dir \\\r\n --do_train \\\r\n --do_eval \\\r\n --max_train_samples 1000 \\\r\n --max_eval_samples 200 \\\r\n --per_device_train_batch_size 4 \\\r\n --per_device_eval_batch_size 4 \\\r\n --num_train_epochs 1 \\\r\n --warmup_steps 8 \\\r\n --block_size 64 \\\r\n --fp16 \\\r\n --report_to none\r\n```\r\n\r\nthe first time the startup is slow and some 5 tqdm bars. It shouldn't do it on consequent runs. but with `datasets>1.5.0` it rebuilds on every run.\r\n\r\n@lhoestq \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2387\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2387\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2386","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2386\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2386\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2386\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2386","id":897560049,"node_id":"MDU6SXNzdWU4OTc1NjAwNDk=","number":2386,"title":"Accessing Arrow dataset cache_files","user":{"login":"Mehrad0711","id":28717374,"node_id":"MDQ6VXNlcjI4NzE3Mzc0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28717374?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehrad0711","html_url":"https:\/\/github.com\/Mehrad0711","followers_url":"https:\/\/api.github.com\/users\/Mehrad0711\/followers","following_url":"https:\/\/api.github.com\/users\/Mehrad0711\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehrad0711\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehrad0711\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehrad0711\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehrad0711\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehrad0711\/repos","events_url":"https:\/\/api.github.com\/users\/Mehrad0711\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehrad0711\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-20T23:57:43Z","updated_at":"2021-05-21T19:18:03Z","closed_at":"2021-05-21T19:18:03Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nIn datasets 1.5.0 the following code snippet would have printed the cache_files:\r\n\r\n```\r\ntrain_data = load_dataset('conll2003', split='train', cache_dir='data')\r\nprint(train_data.cache_files[0]['filename'])\r\n\r\n```\r\n\r\nHowever, in the newest release (1.6.1), it prints an empty list.\r\n\r\nI also tried loading the dataset 
with `keep_in_memory=True` argument but still `cache_files` is empty.\r\n\r\nWas wondering if this is a bug or I need to pass additional arguments so I can access the cache_files.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2386\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2386\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2385","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2385\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2385\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2385\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2385","id":897206823,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ5MjM1Mjcy","number":2385,"title":"update citations","user":{"login":"adeepH","id":46108405,"node_id":"MDQ6VXNlcjQ2MTA4NDA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46108405?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adeepH","html_url":"https:\/\/github.com\/adeepH","followers_url":"https:\/\/api.github.com\/users\/adeepH\/followers","following_url":"https:\/\/api.github.com\/users\/adeepH\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adeepH\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adeepH\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adeepH\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adeepH\/orgs","repos_url":"https:\/\/api.github.com\/users\/adeepH\/repos","events_url":"https:\/\/api.github.com\/users\/adeepH\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adeepH\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-20T17:54:08Z","updated_at":"2021-05-21T12:38:18Z","closed_at":"2021-05-21T12:38:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2385","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2385","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2385.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2385.patch","merged_at":"2021-05-21T12:38:18Z"},"body":"To update citations for [Offenseval_dravidiain](https:\/\/huggingface.co\/datasets\/offenseval_dravidian)\r\n ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2385\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2385\/timeline","performed_via_github_app":null} 
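[Editor's note, not part of the API records above] A minimal, hand-written sketch of how the caching regression discussed in #2387/#2386 above can be observed. It is not taken from the issue threads; the `conll2003` dataset and its `tokens` column are just convenient stand-ins, and the expected outputs in the comments restate what the reporters describe rather than guaranteed behaviour.

```python
from datasets import load_dataset

# Load a split and inspect where its Arrow data lives on disk.
ds = load_dataset("conll2003", split="train")
# Non-empty list of {'filename': '.../*.arrow'} dicts on 1.5.0;
# reported to come back empty on the affected 1.6.x releases.
print(ds.cache_files)

# Apply a trivial map; if the result is written to the cache, later runs
# should reuse a cache-*.arrow file instead of recomputing the transform.
tokenized = ds.map(lambda example: {"n_tokens": len(example["tokens"])})
print(tokenized.cache_files)  # again reported empty on 1.6.x, hence the slow re-runs in #2387
```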
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2384","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2384\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2384\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2384\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2384","id":896866461,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ4OTI4NTQ0","number":2384,"title":"Add args description to DatasetInfo","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-20T13:53:10Z","updated_at":"2021-05-22T09:26:16Z","closed_at":"2021-05-22T09:26:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2384","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2384","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2384.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2384.patch","merged_at":"2021-05-22T09:26:13Z"},"body":"Closes #2354 \r\n\r\nI am not sure what `post_processed` and `post_processing_size` correspond to, so have left them empty for now. 
I also took a guess at some of the other fields like `dataset_size` vs `size_in_bytes`, so might have misunderstood their meaning.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2384\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2384\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2383","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2383\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2383\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2383\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2383","id":895779723,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ3OTU4MTQ0","number":2383,"title":"Improve example in rounding docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-19T18:59:23Z","updated_at":"2021-05-21T12:53:22Z","closed_at":"2021-05-21T12:36:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2383","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2383","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2383.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2383.patch","merged_at":"2021-05-21T12:36:29Z"},"body":"Improves the example in the rounding subsection of the Split API docs. 
With this change, it should more clear what's the difference between the `closest` and the `pct1_dropremainder` rounding.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2383\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2383\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2382","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2382\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2382\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2382\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2382","id":895610216,"node_id":"MDU6SXNzdWU4OTU2MTAyMTY=","number":2382,"title":"DuplicatedKeysError: FAILURE TO GENERATE DATASET ! load_dataset('head_qa', 'en')","user":{"login":"helloworld123-lab","id":75953751,"node_id":"MDQ6VXNlcjc1OTUzNzUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75953751?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/helloworld123-lab","html_url":"https:\/\/github.com\/helloworld123-lab","followers_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/followers","following_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/orgs","repos_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/repos","events_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-19T15:49:48Z","updated_at":"2021-05-30T13:26:16Z","closed_at":"2021-05-30T13:26:16Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello everyone,\r\n\r\nI try to use head_qa dataset in [https:\/\/huggingface.co\/datasets\/viewer\/?dataset=head_qa&config=en](url)\r\n\r\n```\r\n!pip install datasets\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\r\n 'head_qa', 'en')\r\n```\r\nWhen I write above load_dataset(.), it throws the following:\r\n\r\n```\r\nDuplicatedKeysError Traceback (most recent call last)\r\n\r\n in ()\r\n 2 from datasets import load_dataset\r\n 3 dataset = load_dataset(\r\n----> 4 'head_qa', 'en')\r\n\r\n5 frames\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_writer.py in check_duplicate_keys(self)\r\n 347 for hash, key in self.hkey_record:\r\n 348 if hash in tmp_record:\r\n--> 349 raise DuplicatedKeysError(key)\r\n 350 else:\r\n 351 tmp_record.add(hash)\r\n\r\nDuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 1\r\nKeys should be unique and deterministic in nature\r\n```\r\nHow can I fix the error? 
Thanks\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2382\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2382\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2381","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2381\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2381\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2381\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2381","id":895588844,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ3NzkyNDcw","number":2381,"title":"add dataset card title","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-19T15:30:03Z","updated_at":"2021-05-20T18:51:40Z","closed_at":"2021-05-20T18:51:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2381","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2381","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2381.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2381.patch","merged_at":"2021-05-20T18:51:40Z"},"body":"few of them were missed by me earlier which I've added now","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2381\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2381\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2380","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2380\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2380\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2380\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2380","id":895367201,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ3NTk3NTc3","number":2380,"title":"maintain YAML structure reading from README","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-19T12:12:07Z","updated_at":"2021-05-19T13:08:38Z","closed_at":"2021-05-19T13:08:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2380","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2380","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2380.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2380.patch","merged_at":"2021-05-19T13:08:38Z"},"body":"How YAML used be loaded earlier in the string (structure of YAML was affected because of this and YAML for datasets with multiple configs was not being loaded correctly):\r\n```\r\nannotations_creators:\r\nlabeled_final:\r\n- expert-generated\r\nlabeled_swap:\r\n- expert-generated\r\nunlabeled_final:\r\n- machine-generated\r\nlanguage_creators:\r\n- machine-generated\r\nlanguages:\r\n- en\r\nlicenses:\r\n- other\r\nmultilinguality:\r\n- monolingual\r\nsize_categories:\r\nlabeled_final:\r\n- 10K\r\n- `datasets` version: datasets-1.6.2\r\n- Platform: Linux\r\n- Python version: 3.7\r\n- PyArrow version: 0.17.1, also 2.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2377\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2377\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2376","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2376\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2376\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2376\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2376","id":894852264,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ3MTU1NDE4","number":2376,"title":"Improve task api code quality","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-18T23:13:40Z","updated_at":"2021-06-02T20:39:57Z","closed_at":"2021-05-25T15:30:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2376","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2376","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2376.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2376.patch","merged_at":"2021-05-25T15:30:54Z"},"body":"Improves the code quality of the `TaskTemplate` dataclasses.\r\n\r\nChanges:\r\n* replaces `return NotImplemented` with raise `NotImplementedError` \r\n* replaces `sorted` with `len` in the uniqueness check \r\n* defines `label2id` and `id2label` in the `TextClassification` template as properties\r\n* replaces the `object.__setattr__(self, attr, value)` syntax with (IMO nicer) `self.__dict__[attr] = value`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2376\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2376\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2375","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2375\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2375\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2375\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2375","id":894655157,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ2OTg2NTcw","number":2375,"title":"Dataset Streaming","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-18T18:20:00Z","updated_at":"2021-06-23T16:35:02Z","closed_at":"2021-06-23T16:35:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2375","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2375","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2375.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2375.patch","merged_at":"2021-06-23T16:35:01Z"},"body":"# Dataset Streaming\r\n\r\n## API\r\n\r\nCurrent API is\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n# Load an IterableDataset without downloading data\r\nsnli = load_dataset(\"snli\", streaming=True)\r\n\r\n# Access examples by streaming data\r\nprint(next(iter(snli[\"train\"]))) \r\n# {'premise': 'A person on a horse jumps over a broken down airplane.',\r\n# 'hypothesis': 'A person is training his horse for a competition.',\r\n# 'label': 1}\r\n```\r\n\r\nI already implemented a few methods:\r\n- IterableDataset.map: apply transforms on-the-fly to the examples\r\n- IterableDataset.shuffle: shuffle the data _a la_ TFDS, i.e. with a shuffling buffer\r\n- IterableDataset.with_format: set the format to `\"torch\"` to get a `torch.utils.data.IterableDataset`\r\n- merge_datasets: merge two iterable datasets by alternating one or the other (you can specify the probabilities)\r\n\r\nI would love to have your opinion on the API design :)\r\n\r\n## Implementation details\r\n\r\n### Streaming\r\n\r\nData streaming is done using `fsspec` which has nice caching features.\r\n\r\nTo make dataset streaming work I extend the `open` function of dataset scripts to support opening remote files without downloading them entirely. 
It also works with remote compressed archives (currently only zip is supported):\r\n\r\n```python\r\n# Get a file-like object by streaming data from a remote file\r\nopen(\"https:\/\/github.com\/davidsbatista\/NER-datasets\/raw\/master\/CONLL2003\/train.txt\")\r\n\r\n# Get a file-like object by streaming data from a remote compressed archive by using the hop separator \"::\"\r\nopen(\"zip:\/\/snli_1.0_train.txt::https:\/\/nlp.stanford.edu\/projects\/snli\/snli_1.0.zip\")\r\n```\r\n\r\nI also extend the `os.path.join` function to support navigation in remote compressed archives, since it has to deal with the `\"::\"` separator. This separator is used by `fsspec`.\r\n\r\nFinally I also added a retry mechanism in case the connection fails during data streaming.\r\n\r\n### Transforms\r\n\r\nAn IterableDataset wraps an ExamplesIterable instance. There are different subclasses depending on the transforms we want to apply:\r\n- ExamplesIterable: the basic one\r\n- MappedExamplesIterable: an iterable with a `map` function applied on the fly\r\n- BufferShuffledExamplesIterable: an iterable with a shuffling buffer\r\n- CyclingMultiSourcesExamplesIterable: alternates between several ExamplesIterable\r\n- RandomlyCyclingMultiSourcesExamplesIterable: randomly alternates between several ExamplesIterable\r\n\r\n### DatasetBuilder\r\n\r\nI use the same builders as usual. I just added a new method `_get_examples_iterable_for_split` to get an ExamplesIterable for a given split. Currently only the GeneratorBasedBuilder and the ArrowBasedBuilder implement it.\r\n\r\nThe BeamBasedBuilder doesn't implement it yet.\r\nIt means that datasets like wikipedia and natural_questions can't be loaded as IterableDataset for now.\r\n\r\n## Other details\r\n\r\nI may have to do some changes in many dataset script to use `download` instead of `download_and_extract` when extraction is not needed. This will avoid errors for streaming.<\/s>\r\n\r\nEDIT: Actually I just check for the extension of the file to do extraction only if needed.\r\n\r\nEDIT2: It's not possible to stream from .tar.gz files without downloading the file completely. 
For now I raise an error if one want to get a streaming dataset based on .tar.gz files.\r\n\r\n## TODO\r\n\r\nusual stuff:\r\n\r\n- [x] make streaming dependency \"aiohttp\" optional: `pip install datasets[streaming]`\r\n- [x] tests\r\n- [x] docs","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2375\/reactions","total_count":6,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":6,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2375\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2374","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2374\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2374\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2374\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2374","id":894579364,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ2OTIyMjkw","number":2374,"title":"add `desc` to `tqdm` in `Dataset.map()`","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-18T16:44:29Z","updated_at":"2021-05-27T15:44:04Z","closed_at":"2021-05-26T14:59:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2374","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2374","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2374.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2374.patch","merged_at":"2021-05-26T14:59:21Z"},"body":"Fixes #2330. 
Please let me know if anything is also required in this ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2374\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2374\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2373","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2373\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2373\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2373\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2373","id":894499909,"node_id":"MDU6SXNzdWU4OTQ0OTk5MDk=","number":2373,"title":"Loading dataset from local path","user":{"login":"kolakows","id":34172905,"node_id":"MDQ6VXNlcjM0MTcyOTA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34172905?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kolakows","html_url":"https:\/\/github.com\/kolakows","followers_url":"https:\/\/api.github.com\/users\/kolakows\/followers","following_url":"https:\/\/api.github.com\/users\/kolakows\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kolakows\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kolakows\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kolakows\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kolakows\/orgs","repos_url":"https:\/\/api.github.com\/users\/kolakows\/repos","events_url":"https:\/\/api.github.com\/users\/kolakows\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kolakows\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-18T15:20:50Z","updated_at":"2021-05-18T15:36:36Z","closed_at":"2021-05-18T15:36:35Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to load a local dataset with the code below\r\n\r\n```\r\nds = datasets.load_dataset('my_script.py', \r\n data_files='corpus.txt', \r\n data_dir='\/data\/dir', \r\n cache_dir='.')\r\n```\r\nBut internally a BuilderConfig is created, which tries to use getmtime on the data_files string, without using data_dir. 
Is this a bug or am I not using the load_dataset correctly?\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/bc61954083f74e6460688202e9f77dde2475319c\/src\/datasets\/builder.py#L153","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2373\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2373\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2372","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2372\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2372\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2372\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2372","id":894496064,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ2ODUxODc2","number":2372,"title":"ConvQuestions benchmark added","user":{"login":"PhilippChr","id":24608689,"node_id":"MDQ6VXNlcjI0NjA4Njg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24608689?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilippChr","html_url":"https:\/\/github.com\/PhilippChr","followers_url":"https:\/\/api.github.com\/users\/PhilippChr\/followers","following_url":"https:\/\/api.github.com\/users\/PhilippChr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilippChr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilippChr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilippChr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilippChr\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilippChr\/repos","events_url":"https:\/\/api.github.com\/users\/PhilippChr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilippChr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-18T15:16:50Z","updated_at":"2021-05-26T10:31:45Z","closed_at":"2021-05-26T10:31:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2372","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2372","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2372.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2372.patch","merged_at":"2021-05-26T10:31:45Z"},"body":"Hello,\r\nI would like to integrate our dataset on conversational QA. The answers are grounded in the KG.\r\nThe work was published in CIKM 2019 (https:\/\/dl.acm.org\/doi\/10.1145\/3357384.3358016).\r\nWe hope for further research on how to deal with the challenges of factoid conversational QA.\r\nThanks! 
:)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2372\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2372\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2371","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2371\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2371\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2371\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2371","id":894193403,"node_id":"MDU6SXNzdWU4OTQxOTM0MDM=","number":2371,"title":"Align question answering tasks with sub-domains","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-18T09:47:59Z","updated_at":"2021-05-18T09:49:22Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As pointed out by @thomwolf in #2255 we should consider breaking with the pipeline taxonomy of `transformers` to account for the various types of question-answering domains:\r\n\r\n> `question-answering` exists in two forms: abstractive and extractive question answering.\r\n> \r\n> we can keep a generic `question-answering` but then it will probably mean diferrent schema of input\/output for both (abstractive will have text for both while extractive can use spans indication as well as text).\r\n> \r\n> Or we can also propose to use `abstractive-question-answering` and `extractive-question-answering` for instance.\r\n> Maybe we could have `question-answering-abstractive` and `question-answering-extractive` if somehow we can use a for a completion or search in the future (detail).\r\n> Actually I see that people are more organizing in terms of general and sub-tasks, for instance on paperwithcode: https:\/\/paperswithcode.com\/area\/natural-language-processing and on nlpprogress: https:\/\/github.com\/sebastianruder\/NLP-progress\/blob\/master\/english\/question_answering.md#squad\r\n> \r\n> Probably the best is to align with one of these in terms of denomination, PaperWithCode is probably the most active and maintained and we work with them as well.\r\n> Maybe you want to check with a few QA datasets that this schema make sense. 
Typically NaturalQuestions, TriviaQA and can be good second datasets to compare to and be sure of the generality of the schema.\r\n> \r\n> A good recent list of QA datasets to compare the schemas among, is for instance in the UnitedQA paper: https:\/\/arxiv.org\/abs\/2101.00178\r\n\r\nInvestigate which grouping of QA is best suited for `datasets` and adapt \/ extend the QA task template accordingly.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2371\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2371\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2370","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2370\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2370\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2370\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2370","id":893606432,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ2MDkyNDQy","number":2370,"title":"Adding HendrycksTest dataset","user":{"login":"andyzoujm","id":43451571,"node_id":"MDQ6VXNlcjQzNDUxNTcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43451571?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/andyzoujm","html_url":"https:\/\/github.com\/andyzoujm","followers_url":"https:\/\/api.github.com\/users\/andyzoujm\/followers","following_url":"https:\/\/api.github.com\/users\/andyzoujm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/andyzoujm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/andyzoujm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/andyzoujm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/andyzoujm\/orgs","repos_url":"https:\/\/api.github.com\/users\/andyzoujm\/repos","events_url":"https:\/\/api.github.com\/users\/andyzoujm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/andyzoujm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-17T18:53:05Z","updated_at":"2021-05-31T16:37:13Z","closed_at":"2021-05-31T16:37:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2370","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2370","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2370.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2370.patch","merged_at":"2021-05-31T16:37:13Z"},"body":"Adding Hendrycks test from https:\/\/arxiv.org\/abs\/2009.03300.\r\nI'm having a bit of trouble with dummy data creation because some lines in the csv files aren't being loaded properly (only the first entry loaded in a row of length 6). The dataset is loading just fine. 
Hope you can kindly help!\r\nThank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2370\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2370\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2369","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2369\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2369\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2369\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2369","id":893554153,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ2MDQ5NDM1","number":2369,"title":"correct labels of conll2003","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-17T17:37:54Z","updated_at":"2021-05-18T08:27:42Z","closed_at":"2021-05-18T08:27:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2369","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2369","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2369.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2369.patch","merged_at":"2021-05-18T08:27:42Z"},"body":"# What does this PR\r\n\r\nIt fixes\/extends the `ner_tags` for conll2003 to include all. 
\r\nPaper reference https:\/\/arxiv.org\/pdf\/cs\/0306050v1.pdf\r\nModel reference https:\/\/huggingface.co\/elastic\/distilbert-base-cased-finetuned-conll03-english\/blob\/main\/config.json \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2369\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2369\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2368","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2368\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2368\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2368\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2368","id":893411076,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ1OTI5NzM0","number":2368,"title":"Allow \"other-X\" in licenses","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-17T14:47:54Z","updated_at":"2021-05-17T16:36:27Z","closed_at":"2021-05-17T16:36:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2368","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2368","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2368.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2368.patch","merged_at":"2021-05-17T16:36:27Z"},"body":"This PR allows \"other-X\" licenses during metadata validation.\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2368\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2368\/timeline","performed_via_github_app":null} 
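[Editor's note, not part of the API records above] Whatever the dummy-data tooling actually hit in #2370 above, a related pitfall with 6-field multiple-choice CSV rows is splitting lines on commas by hand: the `csv` module respects quoted fields, while a naive `str.split(",")` tears a question containing commas apart. The example row below is invented, and this is only a guess at the failure mode, not a diagnosis of that PR.

```python
import csv
import io

# Invented 6-field row: question, four options, answer letter.
row = '"If x = 2, and y = 3, what is x + y?","4","5","6","7","B"\n'

print(row.strip().split(","))
# naive split -> 8 mangled pieces; the quoted question is broken across fields

print(next(csv.reader(io.StringIO(row))))
# csv.reader -> 6 clean fields: ['If x = 2, and y = 3, what is x + y?', '4', '5', '6', '7', 'B']
```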
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2367","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2367\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2367\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2367\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2367","id":893317427,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ1ODUxNTE0","number":2367,"title":"Remove getchildren from hyperpartisan news detection","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-17T13:10:37Z","updated_at":"2021-05-17T14:07:13Z","closed_at":"2021-05-17T14:07:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2367","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2367","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2367.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2367.patch","merged_at":"2021-05-17T14:07:12Z"},"body":"`Element.getchildren()` is now deprecated in the ElementTree library (I think in python 3.9, so it still passes the automated tests which are using 3.6. 
But for those of us on bleeding-edge distros it now fails).\r\n\r\nhttps:\/\/bugs.python.org\/issue29209","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2367\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2367\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2366","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2366\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2366\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2366\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2366","id":893185266,"node_id":"MDU6SXNzdWU4OTMxODUyNjY=","number":2366,"title":"Json loader fails if user-specified features don't match the json data fields order","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-17T10:26:08Z","updated_at":"2021-06-16T10:47:49Z","closed_at":"2021-06-16T10:47:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"If you do\r\n```python\r\ndataset = load_dataset(\"json\", data_files=data_files, features=features)\r\n```\r\nThen depending on the order of the features in the json data field it fails:\r\n```python\r\n[...]\r\n~\/Desktop\/hf\/datasets\/src\/datasets\/packaged_modules\/json\/json.py in _generate_tables(self, files)\r\n 94 if self.config.schema:\r\n 95 # Cast allows str <-> int\/float, while parse_option explicit_schema does NOT\r\n---> 96 pa_table = pa_table.cast(self.config.schema)\r\n 97 yield i, pa_table\r\n[...]\r\nValueError: Target schema's field names are not matching the table's field names: ['tokens', 'ner_tags'], ['ner_tags', 'tokens']\r\n```\r\n\r\nThis is because one must first re-order the columns of the table to match the `self.config.schema` before calling cast.\r\n\r\nOne way to fix the `cast` would be to replace it with:\r\n```python\r\n# reorder the arrays if necessary + cast to schema\r\n# we can't simply use .cast here because we may need to change the order of the columns\r\npa_table = pa.Table.from_arrays([pa_table[name] for name in schema.names], 
schema=schema)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2366\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2366\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2365","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2365\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2365\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2365\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2365","id":893179697,"node_id":"MDU6SXNzdWU4OTMxNzk2OTc=","number":2365,"title":"Missing ClassLabel encoding in Json loader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":0,"created_at":"2021-05-17T10:19:10Z","updated_at":"2021-06-28T15:05:34Z","closed_at":"2021-06-28T15:05:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently if you want to load a json dataset this way\r\n```python\r\ndataset = load_dataset(\"json\", data_files=data_files, features=features)\r\n```\r\nThen if your features has ClassLabel types and if your json data needs class label encoding (i.e. if the labels in the json files are strings and not integers), then it would fail:\r\n```python\r\n[...]\r\n~\/Desktop\/hf\/datasets\/src\/datasets\/packaged_modules\/json\/json.py in _generate_tables(self, files)\r\n 94 if self.config.schema:\r\n 95 # Cast allows str <-> int\/float, while parse_option explicit_schema does NOT\r\n---> 96 pa_table = pa_table.cast(self.config.schema)\r\n 97 yield i, pa_table\r\n[...]\r\nArrowInvalid: Failed to parse string: 'O' as a scalar of type int64\r\n```\r\n\r\nThis is because it just tries to cast the string data to integers, without applying the mapping str->int first\r\n\r\nThe current workaround is to do instead\r\n```python\r\ndataset = load_dataset(\"json\", data_files=data_files)\r\ndataset = dataset.map(features.encode_example, features=features)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2365\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2365\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2364","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2364\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2364\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2364\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2364","id":892420500,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ1MTI4MDYx","number":2364,"title":"README updated for SNLI, 
MNLI","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-15T11:37:59Z","updated_at":"2021-05-17T14:14:27Z","closed_at":"2021-05-17T13:34:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2364","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2364","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2364.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2364.patch","merged_at":"2021-05-17T13:34:18Z"},"body":"Closes #2275. Mentioned about -1 labels in MNLI, SNLI and how they should be removed before training. 
@lhoestq `check_code_quality` test might fail for MNLI as the license name `other-Open Portion of the American National Corpus` is not a registered tag for 'licenses'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2364\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2364\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2363","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2363\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2363\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2363\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2363","id":892391232,"node_id":"MDU6SXNzdWU4OTIzOTEyMzI=","number":2363,"title":"Trying to use metric.compute but get OSError","user":{"login":"hyusterr","id":52968111,"node_id":"MDQ6VXNlcjUyOTY4MTEx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52968111?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hyusterr","html_url":"https:\/\/github.com\/hyusterr","followers_url":"https:\/\/api.github.com\/users\/hyusterr\/followers","following_url":"https:\/\/api.github.com\/users\/hyusterr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hyusterr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hyusterr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hyusterr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hyusterr\/orgs","repos_url":"https:\/\/api.github.com\/users\/hyusterr\/repos","events_url":"https:\/\/api.github.com\/users\/hyusterr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hyusterr\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-15T08:39:06Z","updated_at":"2021-09-06T14:01:06Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I want to use metric.compute from load_metric('accuracy') to get training accuracy, but receive OSError. 
I am wondering what is the mechanism behind the metric calculation, why would it report an OSError?\r\n\r\n```python\r\n195 for epoch in range(num_train_epochs):\r\n196 model.train()\r\n197 for step, batch in enumerate(train_loader):\r\n198 # print(batch['input_ids'].shape)\r\n199 outputs = model(**batch)\r\n200\r\n201 loss = outputs.loss\r\n202 loss \/= gradient_accumulation_steps\r\n203 accelerator.backward(loss)\r\n204\r\n205 predictions = outputs.logits.argmax(dim=-1)\r\n206 metric.add_batch(\r\n207 predictions=accelerator.gather(predictions),\r\n208 references=accelerator.gather(batch['labels'])\r\n209 )\r\n210 progress_bar.set_postfix({'loss': loss.item(), 'train batch acc.': train_metrics})\r\n211\r\n212 if (step + 1) % 50 == 0 or step == len(train_loader) - 1:\r\n213 train_metrics = metric.compute()\r\n```\r\n\r\nthe error message is as below:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"run_multi.py\", line 273, in \r\n main()\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/click\/core.py\", line 829, in __call__\r\n return self.main(*args, **kwargs)\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/click\/core.py\", line 782, in main\r\n rv = self.invoke(ctx)\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/click\/core.py\", line 1066, in invoke\r\n return ctx.invoke(self.callback, **ctx.params)\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/click\/core.py\", line 610, in invoke\r\n return callback(*args, **kwargs)\r\n File \"run_multi.py\", line 213, in main\r\n train_metrics = metric.compute()\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/datasets\/metric.py\", line 391, in compute\r\n self._finalize()\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/datasets\/metric.py\", line 342, in _finalize\r\n self.writer.finalize()\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py\", line 370, in finalize\r\n self.stream.close()\r\n File \"pyarrow\/io.pxi\", line 132, in pyarrow.lib.NativeFile.close\r\n File \"pyarrow\/error.pxi\", line 99, in pyarrow.lib.check_status\r\nOSError: error closing file\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.1\r\n- Platform: Linux NAME=\"Ubuntu\" VERSION=\"20.04.1 LTS (Focal Fossa)\"\r\n- Python version: python3.8.5\r\n- PyArrow version: 4.0.0\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2363\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2363\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2362","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2362\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2362\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2362\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2362","id":892100749,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ0ODYzOTQw","number":2362,"title":"Fix web_nlg 
metadata","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-14T17:15:07Z","updated_at":"2021-05-17T13:44:17Z","closed_at":"2021-05-17T13:42:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2362","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2362","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2362.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2362.patch","merged_at":null},"body":"Our metadata storage system does not support `.` inside keys. cc @Pierrci \r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2362\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2362\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2361","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2361\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2361\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2361\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2361","id":891982808,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ0NzYzNTU4","number":2361,"title":"Preserve dtype for numpy\/torch\/tf\/jax 
arrays","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-14T14:45:23Z","updated_at":"2021-08-17T08:30:04Z","closed_at":"2021-08-17T08:30:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2361","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2361","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2361.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2361.patch","merged_at":"2021-08-17T08:30:04Z"},"body":"Fixes #625. This lets the user preserve the dtype of numpy array to pyarrow array which was getting lost due to conversion of numpy array -> list -> pyarrow array. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2361\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2361\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2360","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2360\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2360\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2360\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2360","id":891965964,"node_id":"MDU6SXNzdWU4OTE5NjU5NjQ=","number":2360,"title":"Automatically detect datasets with compatible task schemas","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-14T14:23:40Z","updated_at":"2021-05-14T14:23:40Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"See description of #2255 for details.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2360\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2360\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2359","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2359\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2359\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2359\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2359","id":891946017,"node_id":"MDU6SXNzdWU4OTE5NDYwMTc=","number":2359,"title":"Allow model labels to be passed during task 
preparation","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-14T13:58:28Z","updated_at":"2021-05-14T13:58:28Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Models have a config with label2id. And we have the same for datasets with the ClassLabel feature type. At one point either the model or the dataset must sync with the other. It would be great to do that on the dataset side.\r\n\r\nFor example for sentiment classification on amazon reviews with you could have these labels:\r\n- \"1 star\", \"2 stars\", \"3 stars\", \"4 stars\", \"5 stars\"\r\n- \"1\", \"2\", \"3\", \"4\", \"5\"\r\n\r\nSome models may use the first set, while other models use the second set.\r\n\r\nHere in the `TextClassification` class, the user can only specify one set of labels, while many models could actually be compatible but have different sets of labels. Should we allow users to pass a list of compatible labels sets ?\r\n\r\nThen in terms of API, users could use `dataset.prepare_for_task(\"text-classification\", labels=model.labels)` or something like that.\r\n\r\nThe label set could also be the same but not in the same order. For NLI for example, some models use `[\"neutral\", \"entailment\", \"contradiction\"]` and some others use `[\"neutral\", \"contradiction\", \"entailment\"]`, so we should take care of updating the order of the labels in the dataset to match the labels order of the model.\r\n\r\nLet me know what you think ! 
This can be done in a future PR\r\n\r\n_Originally posted by @lhoestq in https:\/\/github.com\/huggingface\/datasets\/pull\/2255#discussion_r632412792_","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2359\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2359\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2358","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2358\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2358\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2358\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2358","id":891269577,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ0MTYyOTY2","number":2358,"title":"Roman Urdu Stopwords List","user":{"login":"devzohaib","id":58664161,"node_id":"MDQ6VXNlcjU4NjY0MTYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/58664161?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/devzohaib","html_url":"https:\/\/github.com\/devzohaib","followers_url":"https:\/\/api.github.com\/users\/devzohaib\/followers","following_url":"https:\/\/api.github.com\/users\/devzohaib\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/devzohaib\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/devzohaib\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/devzohaib\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/devzohaib\/orgs","repos_url":"https:\/\/api.github.com\/users\/devzohaib\/repos","events_url":"https:\/\/api.github.com\/users\/devzohaib\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/devzohaib\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-13T18:29:27Z","updated_at":"2021-05-19T08:50:43Z","closed_at":"2021-05-17T14:05:10Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2358","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2358","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2358.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2358.patch","merged_at":null},"body":"A list of most frequently used Roman Urdu words with different spellings and usages.\r\nThis is a very basic effort to collect some basic stopwords for Roman Urdu to help efforts of analyzing text data in roman Urdu which makes up a huge part of daily internet interaction of Roman-Urdu users.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2358\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2358\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2357","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2357\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2357\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2357\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2357","id":890595693,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQzNTk0NDcz","number":2357,"title":"Adding Microsoft CodeXGlue Datasets","user":{"login":"ncoop57","id":7613470,"node_id":"MDQ6VXNlcjc2MTM0NzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7613470?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ncoop57","html_url":"https:\/\/github.com\/ncoop57","followers_url":"https:\/\/api.github.com\/users\/ncoop57\/followers","following_url":"https:\/\/api.github.com\/users\/ncoop57\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ncoop57\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ncoop57\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ncoop57\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ncoop57\/orgs","repos_url":"https:\/\/api.github.com\/users\/ncoop57\/repos","events_url":"https:\/\/api.github.com\/users\/ncoop57\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ncoop57\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":16,"created_at":"2021-05-13T00:43:01Z","updated_at":"2021-06-08T09:29:57Z","closed_at":"2021-06-08T09:29:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2357","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2357","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2357.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2357.patch","merged_at":"2021-06-08T09:29:57Z"},"body":"Hi there, this is a new pull request to get the CodeXGlue datasets into the awesome HF datasets lib. Most of the work has been done in this PR #997 by the awesome @madlag. However, that PR has been stale for a while now and so I spoke with @lhoestq about finishing up the final mile and so he told me to open a new PR with the final changes :smile:. \r\n\r\nI believe I've met all of the changes still left in the old PR to do, except for the change to the languages. I believe the READMEs should include the different programming languages used rather than just using the tag \"code\" as when searching for datasets, SE researchers may specifically be looking only for what type of programming language and so being able to quickly filter will be very valuable. 
Let me know what you think of that or if you still believe it should be the \"code\" tag @lhoestq.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2357\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2357\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2356","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2356\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2356\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2356\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2356","id":890511019,"node_id":"MDU6SXNzdWU4OTA1MTEwMTk=","number":2356,"title":"How to Add New Metrics Guide","user":{"login":"ncoop57","id":7613470,"node_id":"MDQ6VXNlcjc2MTM0NzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7613470?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ncoop57","html_url":"https:\/\/github.com\/ncoop57","followers_url":"https:\/\/api.github.com\/users\/ncoop57\/followers","following_url":"https:\/\/api.github.com\/users\/ncoop57\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ncoop57\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ncoop57\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ncoop57\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ncoop57\/orgs","repos_url":"https:\/\/api.github.com\/users\/ncoop57\/repos","events_url":"https:\/\/api.github.com\/users\/ncoop57\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ncoop57\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-12T21:42:06Z","updated_at":"2021-05-31T18:49:35Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nCurrently there is an absolutely fantastic guide for how to contribute a new dataset to the library. However, there isn't one for adding new metrics.\r\n\r\n**Describe the solution you'd like**\r\nI'd like for a guide in a similar style to the dataset guide for adding metrics. I believe many of the content in the dataset guide such as setup can be easily copied over with minimal changes. Also, from what I've seen with existing metrics, it shouldn't be as complicated, especially in documentation of the metric, mainly just citation and usage. The most complicated part I see would be in automated tests that run the new metrics, but y'all's test suite seem pretty comprehensive, so it might not be that hard.\r\n\r\n**Describe alternatives you've considered**\r\nOne alternative would be just not having the metrics be community generated and so would not need a step by step guide. 
New metrics would just be proposed as issues and the internal team would take care of them. However, I think it makes more sense to have a step by step guide for contributors to follow.\r\n\r\n**Additional context**\r\nI'd be happy to help with creating this guide as I am very interested in adding software engineering metrics to the library :nerd_face:, the part I would need guidance on would be testing.\r\n\r\nP.S. Love the library and community y'all have built! :hugs: \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2356\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2356\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2355","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2355\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2355\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2355\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2355","id":890484408,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQzNDk5NTIz","number":2355,"title":"normalized TOCs and titles in data cards","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-12T20:59:59Z","updated_at":"2021-05-14T13:23:12Z","closed_at":"2021-05-14T13:23:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2355","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2355","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2355.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2355.patch","merged_at":"2021-05-14T13:23:12Z"},"body":"I started fixing some of the READMEs that were failing the tests introduced by @gchhablani but then realized that there were some consistent differences between earlier and newer versions of some of the titles (e.g. Data Splits vs Data Splits Sample Size, Supported Tasks vs Supported Tasks and Leaderboards). 
We also had different versions of the Table of Content\r\n\r\nThis PR normalizes all of them to the newer version","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2355\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2355\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2354","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2354\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2354\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2354\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2354","id":890439523,"node_id":"MDU6SXNzdWU4OTA0Mzk1MjM=","number":2354,"title":"Document DatasetInfo attributes","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-12T20:01:29Z","updated_at":"2021-05-22T09:26:14Z","closed_at":"2021-05-22T09:26:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nAs noted in PR #2255, the attributes of `DatasetInfo` are not documented in the [docs](https:\/\/huggingface.co\/docs\/datasets\/package_reference\/main_classes.html?highlight=datasetinfo#datasetinfo). 
It would be nice to do so :)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2354\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2354\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2353","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2353\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2353\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2353\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2353","id":890296262,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQzMzM4MDcz","number":2353,"title":"Update README vallidation rules","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-12T16:57:26Z","updated_at":"2021-05-14T08:56:06Z","closed_at":"2021-05-14T08:56:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2353","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2353","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2353.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2353.patch","merged_at":"2021-05-14T08:56:06Z"},"body":"This PR allows unexpected subsections under third-level headings. 
All except `Contributions`.\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2353\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2353\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2352","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2352\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2352\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2352\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2352","id":889810100,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQyOTI4NTgz","number":2352,"title":"Set to_json default to JSON lines","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-12T08:19:25Z","updated_at":"2021-05-21T09:01:14Z","closed_at":"2021-05-21T09:01:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2352","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2352","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2352.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2352.patch","merged_at":"2021-05-21T09:01:13Z"},"body":"With this PR, the method `Dataset.to_json`:\r\n- is added to the docs\r\n- defaults to JSON lines","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2352\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2352\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2351","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2351\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2351\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2351\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2351","id":889584953,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQyNzI5NDIz","number":2351,"title":"simpllify faiss index save","user":{"login":"Guitaricet","id":2821124,"node_id":"MDQ6VXNlcjI4MjExMjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2821124?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Guitaricet","html_url":"https:\/\/github.com\/Guitaricet","followers_url":"https:\/\/api.github.com\/users\/Guitaricet\/followers","following_url":"https:\/\/api.github.com\/users\/Guitaricet\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Guitaricet\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Guitaricet\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Guitaricet\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Guitaricet\/orgs","repos_url":"https:\/\/api.github.com\/users\/Guitaricet\/repos","events_url":"https:\/\/api.github.com\/users\/Guitaricet\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Guitaricet\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-12T03:54:10Z","updated_at":"2021-05-17T13:41:41Z","closed_at":"2021-05-17T13:41:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2351","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2351","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2351.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2351.patch","merged_at":"2021-05-17T13:41:41Z"},"body":"Fixes #2350\r\n\r\nIn some cases, Faiss GPU index objects do not have neither \"device\" nor \"getDevice\". Possibly this happens when some part of the index is computed on CPU.\r\n\r\nIn particular, this would happen with the index `OPQ16_128,IVF512,PQ32` (issue #2350). I did check it, but it is likely that `OPQ` or `PQ` transforms cause it.\r\n\r\nI propose, instead of using the index object to get the device, to infer it form the `FaissIndex.device` field as it is done in `.add_vectors`. Here we assume that `.device` always corresponds to the index placement and it seems reasonable. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2351\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2351\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2350","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2350\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2350\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2350\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2350","id":889580247,"node_id":"MDU6SXNzdWU4ODk1ODAyNDc=","number":2350,"title":"`FaissIndex.save` throws error on GPU","user":{"login":"Guitaricet","id":2821124,"node_id":"MDQ6VXNlcjI4MjExMjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2821124?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Guitaricet","html_url":"https:\/\/github.com\/Guitaricet","followers_url":"https:\/\/api.github.com\/users\/Guitaricet\/followers","following_url":"https:\/\/api.github.com\/users\/Guitaricet\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Guitaricet\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Guitaricet\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Guitaricet\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Guitaricet\/orgs","repos_url":"https:\/\/api.github.com\/users\/Guitaricet\/repos","events_url":"https:\/\/api.github.com\/users\/Guitaricet\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Guitaricet\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-12T03:41:56Z","updated_at":"2021-05-17T13:41:41Z","closed_at":"2021-05-17T13:41:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nAfter training an index with a factory string `OPQ16_128,IVF512,PQ32` on GPU, `.save_faiss_index` throws this error.\r\n\r\n```\r\n File \"index_wikipedia.py\", line 119, in \r\n data[\"train\"].save_faiss_index(\"text_emb\", index_save_path)\r\n File \"\/home\/vlialin\/miniconda3\/envs\/cat\/lib\/python3.8\/site-packages\/datasets\/search.py\", line 470, in save_faiss_index\r\n index.save(file)\r\n File \"\/home\/vlialin\/miniconda3\/envs\/cat\/lib\/python3.8\/site-packages\/datasets\/search.py\", line 334, in save\r\n faiss.write_index(index, str(file))\r\n File \"\/home\/vlialin\/miniconda3\/envs\/cat\/lib\/python3.8\/site-packages\/faiss\/swigfaiss_avx2.py\", line 5654, in write_index\r\n return _swigfaiss.write_index(*args)\r\nRuntimeError: Error in void faiss::write_index(const faiss::Index*, faiss::IOWriter*) at \/root\/miniconda3\/conda-bld\/faiss-pkg_1613235005464\/work\/faiss\/impl\/index_write.cpp:453: don't know how to serialize this type of index\r\n```\r\n\r\n## Steps to reproduce the bug\r\n\r\nAny dataset will do, I 
just selected a familiar one.\r\n\r\n```python\r\nimport numpy as np\r\nimport datasets\r\nINDEX_STR = \"OPQ16_128,IVF512,PQ32\"\r\nINDEX_SAVE_PATH = \"will_not_save.faiss\"\r\n\r\ndata = datasets.load_dataset(\"Fraser\/news-category-dataset\", split=f\"train[:10000]\")\r\n\r\ndef encode(item):\r\n return {\"text_emb\": np.random.randn(768).astype(np.float32)}\r\n\r\ndata = data.map(encode)\r\n\r\ndata.add_faiss_index(column=\"text_emb\", string_factory=INDEX_STR, train_size=10_000, device=0)\r\ndata.save_faiss_index(\"text_emb\", INDEX_SAVE_PATH)\r\n```\r\n\r\n## Expected results\r\nSaving the index\r\n\r\n## Actual results\r\nError in void faiss::write_index(const faiss::Index*, faiss::IOWriter*) ... don't know how to serialize this type of index\r\n\r\n## Environment info\r\n- `datasets` version: 1.6.2\r\n- Platform: Linux-4.15.0-142-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.8\r\n- PyTorch version (GPU?): 1.8.1+cu111 (True)\r\n- Tensorflow version (GPU?): 2.2.0 (False)\r\n- Using GPU in script?: Yes\r\n- Using distributed or parallel set-up in script?: No\r\n\r\n\r\nI will be proposing a fix in a couple of minutes","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2350\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2350\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2349","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2349\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2349\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2349\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2349","id":888586018,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQxNzYzNzg3","number":2349,"title":"Update task_ids for Ascent 
KB","user":{"login":"phongnt570","id":6749421,"node_id":"MDQ6VXNlcjY3NDk0MjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6749421?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/phongnt570","html_url":"https:\/\/github.com\/phongnt570","followers_url":"https:\/\/api.github.com\/users\/phongnt570\/followers","following_url":"https:\/\/api.github.com\/users\/phongnt570\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/phongnt570\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/phongnt570\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/phongnt570\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/phongnt570\/orgs","repos_url":"https:\/\/api.github.com\/users\/phongnt570\/repos","events_url":"https:\/\/api.github.com\/users\/phongnt570\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/phongnt570\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-11T20:44:33Z","updated_at":"2021-05-17T10:53:14Z","closed_at":"2021-05-17T10:48:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2349","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2349","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2349.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2349.patch","merged_at":"2021-05-17T10:48:34Z"},"body":"This \"other-other-knowledge-base\" task is better suited for the dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2349\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2349\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2348","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2348\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2348\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2348\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2348","id":887927737,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQxMTMwOTM4","number":2348,"title":"Add tests for dataset 
cards","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-11T17:14:27Z","updated_at":"2021-05-21T12:10:47Z","closed_at":"2021-05-21T12:10:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2348","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2348","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2348.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2348.patch","merged_at":"2021-05-21T12:10:47Z"},"body":"Adding tests for dataset cards\r\n\r\nThis PR will potentially remove the scripts being used for dataset tags and readme validation.\r\n\r\nAdditionally, this will allow testing dataset readmes by providing the name as follows:\r\n\r\n```bash\r\npytest tests\/test_dataset_cards.py::test_dataset_tags[fashion_mnist]\r\n```\r\nand\r\n\r\n```bash\r\npytest tests\/test_dataset_cards.py::test_readme_content[fashion_mnist]\r\n```\r\nor a combined test as:\r\n\r\n```bash\r\npytest tests\/test_dataset_cards.py::test_dataset_card[fashion_mnist]\r\n```\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2348\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2348\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2347","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2347\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2347\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2347\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2347","id":887404868,"node_id":"MDU6SXNzdWU4ODc0MDQ4Njg=","number":2347,"title":"Add an API to access the language and pretty name of a 
dataset","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-11T14:10:08Z","updated_at":"2021-05-21T09:26:46Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It would be super nice to have an API to get some metadata of the dataset from the name and args passed to `load_dataset`. This way we could programmatically infer the language and the name of a dataset when creating model cards automatically in the Transformers examples scripts.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2347\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2347\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2346","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2346\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2346\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2346\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2346","id":886632114,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM5OTAzMjk3","number":2346,"title":"Add Qasper 
Dataset","user":{"login":"cceyda","id":15624271,"node_id":"MDQ6VXNlcjE1NjI0Mjcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15624271?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cceyda","html_url":"https:\/\/github.com\/cceyda","followers_url":"https:\/\/api.github.com\/users\/cceyda\/followers","following_url":"https:\/\/api.github.com\/users\/cceyda\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cceyda\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cceyda\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cceyda\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cceyda\/orgs","repos_url":"https:\/\/api.github.com\/users\/cceyda\/repos","events_url":"https:\/\/api.github.com\/users\/cceyda\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cceyda\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-11T09:25:44Z","updated_at":"2021-05-18T12:28:28Z","closed_at":"2021-05-18T12:28:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2346","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2346","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2346.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2346.patch","merged_at":"2021-05-18T12:28:27Z"},"body":"[Question Answering on Scientific Research Papers](https:\/\/allenai.org\/project\/qasper\/home)\r\n\r\nDoing NLP on NLP papers to do NLP \u267b\ufe0f I had to add it~\r\n\r\n- [x] Add README (just gotta fill out some more )\r\n- [x] Dataloader code\r\n- [x] Make dummy dataset\r\n- [x] generate dataset infos\r\n- [x] Tests\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2346\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2346\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2345","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2345\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2345\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2345\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2345","id":886586872,"node_id":"MDU6SXNzdWU4ODY1ODY4NzI=","number":2345,"title":"[Question] How to move and reuse preprocessed dataset? 
","user":{"login":"AtmaHou","id":15045402,"node_id":"MDQ6VXNlcjE1MDQ1NDAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15045402?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AtmaHou","html_url":"https:\/\/github.com\/AtmaHou","followers_url":"https:\/\/api.github.com\/users\/AtmaHou\/followers","following_url":"https:\/\/api.github.com\/users\/AtmaHou\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AtmaHou\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AtmaHou\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AtmaHou\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AtmaHou\/orgs","repos_url":"https:\/\/api.github.com\/users\/AtmaHou\/repos","events_url":"https:\/\/api.github.com\/users\/AtmaHou\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AtmaHou\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-05-11T09:09:17Z","updated_at":"2021-06-11T04:39:11Z","closed_at":"2021-06-11T04:39:11Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I am training a gpt-2 from scratch using run_clm.py.\r\n\r\nI want to move and reuse the preprocessed dataset (It take 2 hour to preprocess),\r\n\r\nI tried to :\r\n\r\ncopy path_to_cache_dir\/datasets to new_cache_dir\/datasets\r\nset export HF_DATASETS_CACHE=\"new_cache_dir\/\"\r\nbut the program still re-preprocess the whole dataset without loading cache.\r\n\r\nI also tried to torch.save(lm_datasets, fw), but the saved file is only 14M.\r\n\r\nWhat is the proper way to do this?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2345\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2345\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2344","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2344\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2344\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2344\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2344","id":885331505,"node_id":"MDU6SXNzdWU4ODUzMzE1MDU=","number":2344,"title":"Is there a way to join multiple datasets in 
one?","user":{"login":"alexvaca0","id":35173563,"node_id":"MDQ6VXNlcjM1MTczNTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35173563?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexvaca0","html_url":"https:\/\/github.com\/alexvaca0","followers_url":"https:\/\/api.github.com\/users\/alexvaca0\/followers","following_url":"https:\/\/api.github.com\/users\/alexvaca0\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexvaca0\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexvaca0\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexvaca0\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexvaca0\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexvaca0\/repos","events_url":"https:\/\/api.github.com\/users\/alexvaca0\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexvaca0\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-10T23:16:10Z","updated_at":"2021-05-11T08:24:48Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\nI need to join 2 datasets, one that is in the hub and another I've created from my files. Is there an easy way to join these 2? \n\n**Describe the solution you'd like**\nId like to join them with a merge or join method, just like pandas dataframes. \n\n**Additional context**\nIf you want to extend an existing dataset with more data, for example for training a language model, you need that functionality. 
I've not found it in the documentation.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2344\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2344\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2343","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2343\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2343\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2343\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2343","id":883208539,"node_id":"MDU6SXNzdWU4ODMyMDg1Mzk=","number":2343,"title":"Columns are removed before or after map function applied?","user":{"login":"taghizad3h","id":8199406,"node_id":"MDQ6VXNlcjgxOTk0MDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8199406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/taghizad3h","html_url":"https:\/\/github.com\/taghizad3h","followers_url":"https:\/\/api.github.com\/users\/taghizad3h\/followers","following_url":"https:\/\/api.github.com\/users\/taghizad3h\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/taghizad3h\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/taghizad3h\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/taghizad3h\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/taghizad3h\/orgs","repos_url":"https:\/\/api.github.com\/users\/taghizad3h\/repos","events_url":"https:\/\/api.github.com\/users\/taghizad3h\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/taghizad3h\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-10T02:36:20Z","updated_at":"2021-05-10T02:36:20Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nAccording to the documentation when applying map function the [remove_columns ](https:\/\/huggingface.co\/docs\/datasets\/processing.html#removing-columns) will be removed after they are passed to the function, but in the [source code](https:\/\/huggingface.co\/docs\/datasets\/package_reference\/main_classes.html#datasets.Dataset.map) it's documented that they are removed before applying function. 
I thinks the source code doc is more accurate, right?\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2343\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2343\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2342","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2342\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2342\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2342\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2342","id":882981420,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM2NDg0MzM3","number":2342,"title":"Docs - CER above 1","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-09T23:41:00Z","updated_at":"2021-05-10T13:34:00Z","closed_at":"2021-05-10T13:34:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2342","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2342","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2342.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2342.patch","merged_at":"2021-05-10T13:34:00Z"},"body":"CER can actually be greater than 1.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2342\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2342\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2341","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2341\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2341\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2341\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2341","id":882370933,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM1OTExODI2","number":2341,"title":"Added the Ascent KB","user":{"login":"phongnt570","id":6749421,"node_id":"MDQ6VXNlcjY3NDk0MjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6749421?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/phongnt570","html_url":"https:\/\/github.com\/phongnt570","followers_url":"https:\/\/api.github.com\/users\/phongnt570\/followers","following_url":"https:\/\/api.github.com\/users\/phongnt570\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/phongnt570\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/phongnt570\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/phongnt570\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/phongnt570\/orgs","repos_url":"https:\/\/api.github.com\/users\/phongnt570\/repos","events_url":"https:\/\/api.github.com\/users\/phongnt570\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/phongnt570\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-09T14:17:39Z","updated_at":"2021-05-11T09:16:59Z","closed_at":"2021-05-11T09:16:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2341","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2341","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2341.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2341.patch","merged_at":"2021-05-11T09:16:58Z"},"body":"Added the Ascent Commonsense KB of 8.9M assertions.\r\n\r\n- Paper: [Advanced Semantics for Commonsense Knowledge Extraction (WWW'21)](https:\/\/arxiv.org\/abs\/2011.00905)\r\n- Website: https:\/\/ascent.mpi-inf.mpg.de\/\r\n\r\n(I am the author of the dataset)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2341\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2341\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2340","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2340\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2340\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2340\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2340","id":882370824,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM1OTExNzIx","number":2340,"title":"More consistent copy 
logic","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-09T14:17:33Z","updated_at":"2021-05-11T08:58:33Z","closed_at":"2021-05-11T08:58:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2340","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2340","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2340.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2340.patch","merged_at":"2021-05-11T08:58:33Z"},"body":"Use `info.copy()` instead of `copy.deepcopy(info)`.\r\n`Features.copy` now creates a deep copy.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2340\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2340\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2338","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2338\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2338\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2338\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2338","id":882046077,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM1NjA3NzQx","number":2338,"title":"fixed download link for 
web_science","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-09T09:12:20Z","updated_at":"2021-05-10T13:35:53Z","closed_at":"2021-05-10T13:35:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2338","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2338","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2338.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2338.patch","merged_at":"2021-05-10T13:35:53Z"},"body":"Fixes #2337. Should work with:\r\n`dataset = load_dataset(\"web_of_science\", \"WOS11967\", ignore_verifications=True)`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2338\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2338\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2337","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2337\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2337\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2337\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2337","id":881610567,"node_id":"MDU6SXNzdWU4ODE2MTA1Njc=","number":2337,"title":"NonMatchingChecksumError for web_of_science 
dataset","user":{"login":"nbroad1881","id":24982805,"node_id":"MDQ6VXNlcjI0OTgyODA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24982805?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nbroad1881","html_url":"https:\/\/github.com\/nbroad1881","followers_url":"https:\/\/api.github.com\/users\/nbroad1881\/followers","following_url":"https:\/\/api.github.com\/users\/nbroad1881\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nbroad1881\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nbroad1881\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nbroad1881\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nbroad1881\/orgs","repos_url":"https:\/\/api.github.com\/users\/nbroad1881\/repos","events_url":"https:\/\/api.github.com\/users\/nbroad1881\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nbroad1881\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-09T02:02:02Z","updated_at":"2021-05-10T13:35:53Z","closed_at":"2021-05-10T13:35:53Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"NonMatchingChecksumError when trying to download the web_of_science dataset. \r\n\r\n>NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/data.mendeley.com\/datasets\/9rw3vkcfy4\/6\/files\/c9ea673d-5542-44c0-ab7b-f1311f7d61df\/WebOfScience.zip?dl=1']\r\n\r\nSetting `ignore_verfications=True` results in OSError.\r\n\r\n>OSError: Cannot find data file. \r\nOriginal error:\r\n[Errno 20] Not a directory: '\/root\/.cache\/huggingface\/datasets\/downloads\/37ab2c42f50d553c1d0ea432baca3e9e11fedea4aeec63a81e6b7e25dd10d4e7\/WOS5736\/X.txt'\r\n\r\n```python\r\ndataset = load_dataset('web_of_science', 'WOS5736')\r\n```\r\nThere are 3 data instances and they all don't work. 
'WOS5736', 'WOS11967', 'WOS46985'\r\n\r\ndatasets 1.6.2\r\npython 3.7.10\r\nUbuntu 18.04.5 LTS","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2337\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2337\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2336","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2336\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2336\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2336\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2336","id":881298783,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM0ODk1OTU5","number":2336,"title":"Fix overflow issue in interpolation search","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-08T20:51:36Z","updated_at":"2021-05-10T13:29:07Z","closed_at":"2021-05-10T13:26:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2336","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2336","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2336.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2336.patch","merged_at":"2021-05-10T13:26:12Z"},"body":"Fixes #2335 \r\n\r\nMore info about this error can be found [here](https:\/\/stackoverflow.com\/questions\/53239890\/why-do-i-keep-getting-this-error-runtimewarning-overflow-encountered-in-int-sc\/53240100). 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2336\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2336\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2335","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2335\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2335\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2335\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2335","id":881291887,"node_id":"MDU6SXNzdWU4ODEyOTE4ODc=","number":2335,"title":"Index error in Dataset.map","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-08T20:44:57Z","updated_at":"2021-05-10T13:26:12Z","closed_at":"2021-05-10T13:26:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The following code, if executed on master, raises an IndexError (due to overflow):\r\n```python\r\n>>> from datasets import *\r\n>>> d = load_dataset(\"bookcorpus\", split=\"train\")\r\nReusing dataset bookcorpus (C:\\Users\\Mario\\.cache\\huggingface\\datasets\\bookcorpus\\plain_text\\1.0.0\\44662c4a114441c35200992bea923b170e6f13f2f0beb7c14e43759cec498700)\r\n2021-05-08 21:23:46.859818: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library cudart64_101.dll\r\n>>> d.map(lambda ex: ex)\r\n 0%|\u258e | 289430\/74004228 [00:13<58:41, 20935.33ex\/s]c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\table.py:84: RuntimeWarning: overflow encountered in int_scalars\r\n k = i + ((j - i) * (x - arr[i]) \/\/ (arr[j] - arr[i]))\r\n 0%|\u258e | 290162\/74004228 [00:13<59:11, 20757.23ex\/s]\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\arrow_dataset.py\", line 1498, in map\r\n new_fingerprint=new_fingerprint,\r\n File 
\"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\arrow_dataset.py\", line 174, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\fingerprint.py\", line 340, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\arrow_dataset.py\", line 1799, in _map_single\r\n for i, example in enumerate(pbar):\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\site-packages\\tqdm\\std.py\", line 1133, in __iter__\r\n for obj in iterable:\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\arrow_dataset.py\", line 1145, in __iter__\r\n format_kwargs=format_kwargs,\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\arrow_dataset.py\", line 1337, in _getitem\r\n pa_subtable = query_table(self._data, key, indices=self._indices if self._indices is not None else None)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\formatting\\formatting.py\", line 368, in query_table\r\n pa_subtable = _query_table(table, key)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\formatting\\formatting.py\", line 79, in _query_table\r\n return table.fast_slice(key % table.num_rows, 1)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\table.py\", line 128, in fast_slice\r\n i = _interpolation_search(self._offsets, offset)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\table.py\", line 91, in _interpolation_search\r\n raise IndexError(f\"Invalid query '{x}' for size {arr[-1] if len(arr) else 'none'}.\")\r\nIndexError: Invalid query '290162' for size 74004228.\r\n```\r\nTested on Windows, can run on Linux if needed.\r\n\r\nEDIT:\r\nIt seems like for this to happen, the default NumPy dtype has to be np.int32.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2335\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2335\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2334","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2334\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2334\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2334\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2334","id":879810107,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMzNTAzNTEw","number":2334,"title":"Updating the DART file checksums in 
GEM","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-07T21:53:44Z","updated_at":"2021-05-07T22:18:10Z","closed_at":"2021-05-07T22:18:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2334","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2334","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2334.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2334.patch","merged_at":"2021-05-07T22:18:10Z"},"body":"The DART files were just updated on the source GitHub\r\n\r\nhttps:\/\/github.com\/Yale-LILY\/dart\/commit\/34b3c872da4811523e334f1631e54ca8105dffab","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2334\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2334\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2333","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2333\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2333\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2333\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2333","id":879214067,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMyOTUwNzIy","number":2333,"title":"Fix duplicate 
keys","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-07T15:28:08Z","updated_at":"2021-05-08T21:47:31Z","closed_at":"2021-05-07T15:57:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2333","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2333","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2333.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2333.patch","merged_at":"2021-05-07T15:57:08Z"},"body":"As noticed in https:\/\/github.com\/huggingface\/datasets\/pull\/2245, many datasets yield duplicate keys.\r\nMost of the time it was because the counter used for ids were reset at each new data file.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2333\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2333\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2332","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2332\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2332\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2332\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2332","id":879041608,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMyNzk1NDE4","number":2332,"title":"Add note about indices mapping in save_to_disk 
docstring","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-07T13:49:42Z","updated_at":"2021-05-07T17:20:48Z","closed_at":"2021-05-07T17:20:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2332","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2332","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2332.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2332.patch","merged_at":"2021-05-07T17:20:48Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2332\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2332\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2331","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2331\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2331\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2331\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2331","id":879031427,"node_id":"MDU6SXNzdWU4NzkwMzE0Mjc=","number":2331,"title":"Add 
Topical-Chat","user":{"login":"ktangri","id":22266659,"node_id":"MDQ6VXNlcjIyMjY2NjU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22266659?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ktangri","html_url":"https:\/\/github.com\/ktangri","followers_url":"https:\/\/api.github.com\/users\/ktangri\/followers","following_url":"https:\/\/api.github.com\/users\/ktangri\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ktangri\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ktangri\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ktangri\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ktangri\/orgs","repos_url":"https:\/\/api.github.com\/users\/ktangri\/repos","events_url":"https:\/\/api.github.com\/users\/ktangri\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ktangri\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-07T13:43:59Z","updated_at":"2021-05-07T13:43:59Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Topical-Chat\r\n- **Description:** a knowledge-grounded human-human conversation dataset where the underlying knowledge spans 8 broad topics and conversation partners don\u2019t have explicitly defined roles\r\n- **Paper:** https:\/\/www.isca-speech.org\/archive\/Interspeech_2019\/pdfs\/3079.pdf\r\n- **Data:** https:\/\/github.com\/alexa\/Topical-Chat\r\n- **Motivation:** Good quality, knowledge-grounded dataset that spans a broad range of topics\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2331\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2331\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2330","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2330\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2330\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2330\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2330","id":878490927,"node_id":"MDU6SXNzdWU4Nzg0OTA5Mjc=","number":2330,"title":"Allow passing `desc` to `tqdm` in 
`Dataset.map()`","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-07T05:52:54Z","updated_at":"2021-05-26T14:59:21Z","closed_at":"2021-05-26T14:59:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It's normal to have many `map()` calls, and some of them can take a few minutes,\r\nit would be nice to have a description on the progress bar.\r\n\r\nAlternative solution:\r\nPrint the description before\/after the `map()` call.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2330\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2330\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2329","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2329\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2329\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2329\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2329","id":877924198,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMxODA3MTk0","number":2329,"title":"Add cache dir for in-memory 
datasets","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-05-06T19:35:32Z","updated_at":"2021-06-08T19:46:48Z","closed_at":"2021-06-08T19:06:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2329","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2329","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2329.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2329.patch","merged_at":null},"body":"Adds the cache dir attribute to DatasetInfo as suggested by @lhoestq.\r\n\r\nShould fix #2322 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2329\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2329\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2328","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2328\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2328\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2328\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2328","id":877673896,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMxNTg2MzU2","number":2328,"title":"Add Matthews\/Pearson\/Spearman correlation 
metrics","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-06T16:09:27Z","updated_at":"2021-05-06T16:58:10Z","closed_at":"2021-05-06T16:58:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2328","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2328","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2328.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2328.patch","merged_at":"2021-05-06T16:58:10Z"},"body":"Added three metrics:\r\n- The Matthews correlation coefficient (from sklearn)\r\n- The Pearson correlation coefficient (from scipy)\r\n- The Spearman correlation coefficient (from scipy)\r\n\r\ncc @sgugger ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2328\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2328\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2327","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2327\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2327\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2327\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2327","id":877565831,"node_id":"MDU6SXNzdWU4Nzc1NjU4MzE=","number":2327,"title":"A syntax error in 
example","user":{"login":"mymusise","id":6883957,"node_id":"MDQ6VXNlcjY4ODM5NTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6883957?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mymusise","html_url":"https:\/\/github.com\/mymusise","followers_url":"https:\/\/api.github.com\/users\/mymusise\/followers","following_url":"https:\/\/api.github.com\/users\/mymusise\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mymusise\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mymusise\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mymusise\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mymusise\/orgs","repos_url":"https:\/\/api.github.com\/users\/mymusise\/repos","events_url":"https:\/\/api.github.com\/users\/mymusise\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mymusise\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-06T14:34:44Z","updated_at":"2021-05-20T03:04:19Z","closed_at":"2021-05-20T03:04:19Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"![image](https:\/\/user-images.githubusercontent.com\/6883957\/117315905-b47a5c00-aeba-11eb-91eb-b2a4a0212a56.png)\r\n\r\nSorry to report with an image, I can't find the template source code of this snippet.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2327\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2327\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2326","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2326\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2326\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2326\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2326","id":876829254,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwODk3MjI4","number":2326,"title":"Enable auto-download for PAN-X \/ Wikiann domain in 
XTREME","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-05T20:58:38Z","updated_at":"2021-05-07T08:41:10Z","closed_at":"2021-05-07T08:41:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2326","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2326","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2326.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2326.patch","merged_at":"2021-05-07T08:41:10Z"},"body":"This PR replaces the manual download of the `PAN-X.lang` domains with an auto-download from a Dropbox link provided by the Wikiann author. We also add the relevant dummy data for these domains.\r\n\r\nWhile re-generating `dataset_infos.json` I ran into a `KeyError` in the `udpos.Arabic` domain so have included a fix for this as well.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2326\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2326\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2325","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2325\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2325\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2325\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2325","id":876653121,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwNzU1MzIx","number":2325,"title":"Added the HLGD 
dataset","user":{"login":"tingofurro","id":2609265,"node_id":"MDQ6VXNlcjI2MDkyNjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2609265?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tingofurro","html_url":"https:\/\/github.com\/tingofurro","followers_url":"https:\/\/api.github.com\/users\/tingofurro\/followers","following_url":"https:\/\/api.github.com\/users\/tingofurro\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tingofurro\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tingofurro\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tingofurro\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tingofurro\/orgs","repos_url":"https:\/\/api.github.com\/users\/tingofurro\/repos","events_url":"https:\/\/api.github.com\/users\/tingofurro\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tingofurro\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-05T16:53:29Z","updated_at":"2021-05-12T14:55:13Z","closed_at":"2021-05-12T14:16:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2325","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2325","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2325.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2325.patch","merged_at":"2021-05-12T14:16:38Z"},"body":"Added the Headline Grouping Dataset (HLGD), from the NAACL2021 paper: News Headline Grouping as a Challenging NLU Task\r\nDataset Link: https:\/\/github.com\/tingofurro\/headline_grouping\r\nPaper link: https:\/\/people.eecs.berkeley.edu\/~phillab\/pdfs\/NAACL2021_HLG.pdf","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2325\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2325\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2324","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2324\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2324\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2324\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2324","id":876602064,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwNzE1NTQz","number":2324,"title":"Create Audio 
feature","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/8","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8\/labels","id":6968069,"node_id":"MI_kwDODunzps4AalMF","number":8,"title":"1.12","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":4,"closed_issues":2,"state":"open","created_at":"2021-07-21T15:34:56Z","updated_at":"2021-10-13T10:26:33Z","due_on":"2021-08-30T07:00:00Z","closed_at":null},"comments":30,"created_at":"2021-05-05T15:55:22Z","updated_at":"2021-10-13T10:26:33Z","closed_at":"2021-10-13T10:26:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2324","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2324","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2324.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2324.patch","merged_at":"2021-10-13T10:26:33Z"},"body":"Create `Audio` feature to handle raw audio files.\r\n\r\nSome decisions to be further discussed:\r\n- I have chosen `soundfile` as the audio library; another interesting library is `librosa`, but this requires `soundfile` (see 
[here](https:\/\/github.com\/librosa\/librosa\/blob\/main\/setup.cfg#L53)). If we require some more advanced functionalities, we could eventually switch the library.\r\n- I have implemented the audio feature as an extra: `pip install datasets[audio]`. For the moment, the typical datasets user uses only text datasets, and there is no need for them for additional package requirements for audio\/image if they do not need them.\r\n- For tests, I require audio dependencies (so that all audio functionalities are checked with our CI test suite); I exclude Linux platforms, which require an additional library to be installed with the distribution package manager\r\n - I also require `pytest-datadir`, which allow to have (audio) data files for tests\r\n- The audio data contain: array and sample_rate.\r\n- The array is reshaped as 1D array (expected input for `Wav2Vec2`).\r\n\r\nNote that to install `soundfile` on Linux, you need to install `libsndfile` using your distribution\u2019s package manager, for example `sudo apt-get install libsndfile1`.\r\n\r\n## Requirements Specification\r\n- Access example with audio loading and resampling:\r\n ```python\r\n ds[0][\"audio\"]\r\n ```\r\n- Map with audio loading & resampling:\r\n ```python\r\n def preprocess(batch):\r\n batch[\"input_values\"] = processor(batch[\"audio\"]).input_values\r\n return batch\r\n\r\n ds = ds.map(preprocess)\r\n ```\r\n- Map without audio loading and resampling:\r\n ```python\r\n def preprocess(batch):\r\n batch[\"labels\"] = processor(batch[\"target_text\"]).input_values\r\n return batch\r\n\r\n ds = ds.map(preprocess)\r\n ```\r\n- Additional requirement specification (see https:\/\/github.com\/huggingface\/datasets\/pull\/2324#pullrequestreview-768864998): Cast audio column to change sampling sate:\r\n ```python\r\n ds = ds.cast_column(\"audio\", Audio(sampling_rate=16_000))\r\n ```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2324\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":3,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2324\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2323","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2323\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2323\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2323\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2323","id":876438507,"node_id":"MDU6SXNzdWU4NzY0Mzg1MDc=","number":2323,"title":"load_dataset(\"timit_asr\") gives back duplicates of just one sample 
text","user":{"login":"ekeleshian","id":33647474,"node_id":"MDQ6VXNlcjMzNjQ3NDc0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33647474?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ekeleshian","html_url":"https:\/\/github.com\/ekeleshian","followers_url":"https:\/\/api.github.com\/users\/ekeleshian\/followers","following_url":"https:\/\/api.github.com\/users\/ekeleshian\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ekeleshian\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ekeleshian\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ekeleshian\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ekeleshian\/orgs","repos_url":"https:\/\/api.github.com\/users\/ekeleshian\/repos","events_url":"https:\/\/api.github.com\/users\/ekeleshian\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ekeleshian\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-05T13:14:48Z","updated_at":"2021-05-07T10:32:30Z","closed_at":"2021-05-07T10:32:30Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen you look up on key [\"train\"] and then ['text'], you get back a list with just one sentence duplicated 4620 times. Namely, the sentence \"Would such an act of refusal be useful?\". Similarly when you look up ['test'] and then ['text'], the list is one sentence repeated \"The bungalow was pleasantly situated near the shore.\" 1680 times. \r\n\r\nI tried to work around the issue by downgrading to datasets version 1.3.0, inspired by [this post](https:\/\/www.gitmemory.com\/issue\/huggingface\/datasets\/2052\/798904836) and removing the entire huggingface directory from ~\/.cache, but I still get the same issue. 
\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ntimit = load_dataset(\"timit_asr\")\r\nprint(timit['train']['text'])\r\nprint(timit['test']['text'])\r\n```\r\n\r\n## Expected Result\r\nRows of diverse text, like how it is shown in the [wav2vec2.0 tutorial](https:\/\/colab.research.google.com\/github\/patrickvonplaten\/notebooks\/blob\/master\/Fine_tuning_Wav2Vec2_for_English_ASR.ipynb)\r\n[screenshot]\r\n\r\n\r\n## Actual results\r\nRows of repeated text.\r\n[screenshot]\r\n\r\n\r\n## Versions\r\n- Datasets: 1.3.0\r\n- Python: 3.9.1\r\n- Platform: macOS-11.2.1-x86_64-i386-64bit}\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2323\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2323\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2322","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2322\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2322\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2322\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2322","id":876383853,"node_id":"MDU6SXNzdWU4NzYzODM4NTM=","number":2322,"title":"Calls to map are not cached.","user":{"login":"villmow","id":2743060,"node_id":"MDQ6VXNlcjI3NDMwNjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2743060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/villmow","html_url":"https:\/\/github.com\/villmow","followers_url":"https:\/\/api.github.com\/users\/villmow\/followers","following_url":"https:\/\/api.github.com\/users\/villmow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/villmow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/villmow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/villmow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/villmow\/orgs","repos_url":"https:\/\/api.github.com\/users\/villmow\/repos","events_url":"https:\/\/api.github.com\/users\/villmow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/villmow\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-05T12:11:27Z","updated_at":"2021-06-08T19:10:02Z","closed_at":"2021-06-08T19:08:21Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nSomehow caching does not work for me anymore. 
Am I doing something wrong, or is there anything that I missed?\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n\r\nimport datasets\r\ndatasets.set_caching_enabled(True)\r\nsst = datasets.load_dataset(\"sst\")\r\n\r\ndef foo(samples, i):\r\n print(\"executed\", i[:10])\r\n return samples\r\n\r\n# first call\r\nx = sst.map(foo, batched=True, with_indices=True, num_proc=2)\r\n\r\nprint('\\n'*3, \"#\" * 30, '\\n'*3)\r\n\r\n# second call\r\ny = sst.map(foo, batched=True, with_indices=True, num_proc=2)\r\n\r\n# print version\r\nimport sys\r\nimport platform\r\nprint(f\"\"\"\r\n- Datasets: {datasets.__version__}\r\n- Python: {sys.version}\r\n- Platform: {platform.platform()}\r\n\"\"\")\r\n```\r\n\r\n## Actual results\r\nThis code prints the following output for me:\r\n```bash\r\nNo config specified, defaulting to: sst\/default\r\nReusing dataset sst (\/home\/johannes\/.cache\/huggingface\/datasets\/sst\/default\/1.0.0\/b8a7889ef01c5d3ae8c379b84cc4080f8aad3ac2bc538701cbe0ac6416fb76ff)\r\n#0: 0%| | 0\/5 [00:00>> from datasets import load_dataset\r\n>>> dataset = load_dataset(\"oscar\", \"unshuffled_deduplicated_af\")\r\nDownloading: 14.7kB [00:00, 4.91MB\/s]\r\nDownloading: 3.07MB [00:00, 32.6MB\/s]\r\nDownloading and preparing dataset oscar\/unshuffled_deduplicated_af (download: 62.93 MiB, generated: 163.38 MiB, post-processed: Unknown size, total: 226.32 MiB) to C:\\Users\\sgraaf\\.cache\\huggingface\\datasets\\oscar\\unshuffled_deduplicated_af\\1.0.0\\bd4f96df5b4512007ef9fd17bbc1ecde459fa53d2fc0049cf99392ba2efcc464...\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 81.0\/81.0 [00:00<00:00, 40.5kB\/s]\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 66.0M\/66.0M [00:18<00:00, 3.50MB\/s]\r\nTraceback (most recent call last):\r\n 
File \"<stdin>\", line 1, in <module>\r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\datasets\\load.py\", line 745, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\datasets\\builder.py\", line 574, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\datasets\\builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\datasets\\builder.py\", line 979, in _prepare_split\r\n for key, record in utils.tqdm(\r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\tqdm\\std.py\", line 1133, in __iter__\r\n for obj in iterable:\r\n File \"C:\\Users\\sgraaf\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\oscar\\bd4f96df5b4512007ef9fd17bbc1ecde459fa53d2fc0049cf99392ba2efcc464\\oscar.py\", line 359, in _generate_examples\r\n for line in f:\r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\encodings\\cp1252.py\", line 23, in decode\r\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\r\nUnicodeDecodeError: 'charmap' codec can't decode byte 0x9d in position 7454: character maps to <undefined>\r\n```\r\n\r\n## Versions\r\nPaste the output of the following code:\r\n```python\r\nimport datasets\r\nimport sys\r\nimport platform\r\n\r\nprint(f\"\"\"\r\n- Datasets: {datasets.__version__}\r\n- Python: {sys.version}\r\n- Platform: {platform.platform()}\r\n\"\"\")\r\n```\r\n- Datasets: 1.6.2\r\n- Python: 3.9.4 (tags\/v3.9.4:1f2e308, Apr 6 2021, 13:40:21) [MSC v.1928 64 bit (AMD64)]\r\n- Platform: Windows-10-10.0.19041-SP0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2319\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2319\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2318","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2318\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2318\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2318\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2318","id":876212460,"node_id":"MDU6SXNzdWU4NzYyMTI0NjA=","number":2318,"title":"[api request] API to obtain \"dataset_module\" dynamic 
path?","user":{"login":"richardliaw","id":4529381,"node_id":"MDQ6VXNlcjQ1MjkzODE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4529381?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richardliaw","html_url":"https:\/\/github.com\/richardliaw","followers_url":"https:\/\/api.github.com\/users\/richardliaw\/followers","following_url":"https:\/\/api.github.com\/users\/richardliaw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richardliaw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richardliaw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richardliaw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richardliaw\/orgs","repos_url":"https:\/\/api.github.com\/users\/richardliaw\/repos","events_url":"https:\/\/api.github.com\/users\/richardliaw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richardliaw\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-05-05T08:40:48Z","updated_at":"2021-05-06T08:45:45Z","closed_at":"2021-05-06T07:57:54Z",
"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nA clear and concise description of what the problem is.\r\n\r\nThis is an awesome library. \r\n\r\nIt seems like the dynamic module path in this library has broken some of hyperparameter tuning functionality: https:\/\/discuss.huggingface.co\/t\/using-hyperparameter-search-in-trainer\/785\/34\r\n\r\nThis is because Ray will spawn new processes, and each process will load modules by path. However, we need to explicitly inform Ray to load the right modules, or else it will error upon import. \r\n\r\nI'd like an API to obtain the dynamic paths. This will allow us to support this functionality in this awesome library while being future proof.\r\n\r\n**Describe the solution you'd like**\r\nA clear and concise description of what you want to happen.\r\n\r\n`datasets.get_dynamic_paths -> List[str]` will be sufficient for my use case.\r\n\r\nBy offering this API, we will be able to address the following issues (by patching the ray integration sufficiently):\r\n\r\nhttps:\/\/github.com\/huggingface\/blog\/issues\/106\r\nhttps:\/\/github.com\/huggingface\/transformers\/issues\/11565\r\nhttps:\/\/discuss.huggingface.co\/t\/using-hyperparameter-search-in-trainer\/785\/34\r\nhttps:\/\/discuss.huggingface.co\/t\/using-hyperparameter-search-in-trainer\/785\/35\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2318\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2318\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2317","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2317\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2317\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2317\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2317","id":875767318,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwMDQxNzc4","number":2317,"title":"Fix incorrect version specification for the pyarrow 
package","user":{"login":"cemilcengiz","id":32267027,"node_id":"MDQ6VXNlcjMyMjY3MDI3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32267027?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cemilcengiz","html_url":"https:\/\/github.com\/cemilcengiz","followers_url":"https:\/\/api.github.com\/users\/cemilcengiz\/followers","following_url":"https:\/\/api.github.com\/users\/cemilcengiz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cemilcengiz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cemilcengiz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cemilcengiz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cemilcengiz\/orgs","repos_url":"https:\/\/api.github.com\/users\/cemilcengiz\/repos","events_url":"https:\/\/api.github.com\/users\/cemilcengiz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cemilcengiz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-04T19:30:20Z","updated_at":"2021-05-05T10:09:16Z","closed_at":"2021-05-05T09:21:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2317","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2317","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2317.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2317.patch","merged_at":"2021-05-05T09:21:58Z"},"body":"This PR addresses the bug in the pyarrow version specification, which is detailed in #2316 .\r\nSimply, I put a comma between the version bounds.\r\n\r\nFix #2316.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2317\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2317\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2316","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2316\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2316\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2316\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2316","id":875756353,"node_id":"MDU6SXNzdWU4NzU3NTYzNTM=","number":2316,"title":"Incorrect version specification for 
pyarrow","user":{"login":"cemilcengiz","id":32267027,"node_id":"MDQ6VXNlcjMyMjY3MDI3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32267027?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cemilcengiz","html_url":"https:\/\/github.com\/cemilcengiz","followers_url":"https:\/\/api.github.com\/users\/cemilcengiz\/followers","following_url":"https:\/\/api.github.com\/users\/cemilcengiz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cemilcengiz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cemilcengiz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cemilcengiz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cemilcengiz\/orgs","repos_url":"https:\/\/api.github.com\/users\/cemilcengiz\/repos","events_url":"https:\/\/api.github.com\/users\/cemilcengiz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cemilcengiz\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-04T19:15:11Z","updated_at":"2021-05-05T10:10:03Z","closed_at":"2021-05-05T10:10:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nThe pyarrow dependency is incorrectly specified in setup.py file, in [this line](https:\/\/github.com\/huggingface\/datasets\/blob\/3a3e5a4da20bfcd75f8b6a6869b240af8feccc12\/setup.py#L77).\r\nAlso as a snippet:\r\n```python\r\n \"pyarrow>=1.0.0<4.0.0\",\r\n```\r\n\r\n## Steps to reproduce the bug\r\n```bash\r\n pip install \"pyarrow>=1.0.0<4.0.0\"\r\n```\r\n\r\n## Expected results\r\nIt is expected to get a pyarrow version between 1.0.0 (inclusive) and 4.0.0 (exclusive).\r\n\r\n## Actual results\r\npip ignores the specified versions since there is a missing comma between the lower and upper limits. Therefore, pip installs the latest pyarrow version from PYPI, which is 4.0.0.\r\nThis is especially problematic since \"conda env export\" fails due to incorrect version specification. 
Here is the conda error as well:\r\n```bash\r\nconda env export\r\nInvalidVersionSpec: Invalid version '1.0.0<4.0.0': invalid character(s)\r\n```\r\n\r\n\r\n## Fix suggestion\r\nPut a comma between the version limits which means replacing the line in setup.py file with the following:\r\n```python\r\n \"pyarrow>=1.0.0,<4.0.0\",\r\n```\r\n\r\n## Versions\r\nPaste the output of the following code:\r\n```python\r\n- Datasets: 1.6.2\r\n- Python: 3.7.10 (default, Feb 26 2021, 18:47:35) \r\n[GCC 7.3.0]\r\n- Platform: Linux-5.4.0-42-generic-x86_64-with-debian-buster-sid\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2316\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2316\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2315","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2315\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2315\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2315\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2315","id":875742200,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwMDIyMDYy","number":2315,"title":"Datasets cli improvements","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-04T18:55:11Z","updated_at":"2021-05-10T16:36:51Z","closed_at":"2021-05-10T16:36:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2315","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2315","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2315.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2315.patch","merged_at":"2021-05-10T16:36:50Z"},"body":"This PR:\r\n* replaces the code from the `bug_report.md` that was used to get relevant system info with a dedicated command (a more elegant approach than copy-pasting the code IMO)\r\n* removes the `download` command (copied from the transformers repo?)\r\n* adds missing help messages to the cli 
commands\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2315\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2315\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2314","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2314\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2314\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2314\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2314","id":875729271,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwMDExODc4","number":2314,"title":"Minor refactor prepare_module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-04T18:37:26Z","updated_at":"2021-10-13T09:07:34Z","closed_at":"2021-10-13T09:07:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2314","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2314","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2314.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2314.patch","merged_at":null},"body":"Start to refactor `prepare_module` to try to decouple functionality.\r\n\r\nThis PR does:\r\n- extract function `_initialize_dynamic_modules_namespace_package`\r\n- extract function `_find_module_in_github_or_s3`\r\n- some renaming of variables\r\n- use of f-strings","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2314\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2314\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2313","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2313\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2313\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2313\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2313","id":875475367,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI5ODEwNTc4","number":2313,"title":"Remove unused head_hf_s3 function","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-04T13:42:06Z","updated_at":"2021-05-07T09:31:42Z","closed_at":"2021-05-07T09:31:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2313","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2313","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2313.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2313.patch","merged_at":null},"body":"Currently, the function `head_hf_s3` is not used:\r\n- neither its returned result is used\r\n- nor it raises any exception, as exceptions are catched and returned (not raised)\r\n\r\nThis PR removes it.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2313\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2313\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2312","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2312\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2312\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2312\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2312","id":875435726,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI5Nzc4NjUz","number":2312,"title":"Add rename_columnS 
method","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-04T12:57:53Z","updated_at":"2021-05-04T13:43:13Z","closed_at":"2021-05-04T13:43:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2312","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2312","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2312.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2312.patch","merged_at":"2021-05-04T13:43:12Z"},"body":"Cherry-picked from #2255 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2312\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2312\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2311","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2311\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2311\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2311\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2311","id":875262208,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI5NjQwNTMx","number":2311,"title":"Add SLR52, SLR53 and SLR54 to 
OpenSLR","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-04T09:08:03Z","updated_at":"2021-05-07T09:50:55Z","closed_at":"2021-05-07T09:50:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2311","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2311","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2311.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2311.patch","merged_at":"2021-05-07T09:50:55Z"},"body":"Add large speech datasets for Sinhala, Bengali and Nepali.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2311\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2311\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2310","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2310\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2310\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2310\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2310","id":875096051,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI5NTEwNTg5","number":2310,"title":"Update 
README.md","user":{"login":"cryoff","id":15029054,"node_id":"MDQ6VXNlcjE1MDI5MDU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15029054?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cryoff","html_url":"https:\/\/github.com\/cryoff","followers_url":"https:\/\/api.github.com\/users\/cryoff\/followers","following_url":"https:\/\/api.github.com\/users\/cryoff\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cryoff\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cryoff\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cryoff\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cryoff\/orgs","repos_url":"https:\/\/api.github.com\/users\/cryoff\/repos","events_url":"https:\/\/api.github.com\/users\/cryoff\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cryoff\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-04T04:38:01Z","updated_at":"2021-05-04T06:35:59Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2310","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2310","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2310.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2310.patch","merged_at":null},"body":"Provides description of data instances and dataset features","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2310\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2310\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2309","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2309\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2309\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2309\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2309","id":874644990,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI5MTU4NjQx","number":2309,"title":"Fix conda 
release","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-03T14:52:59Z","updated_at":"2021-05-03T16:01:17Z","closed_at":"2021-05-03T16:01:17Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2309","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2309","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2309.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2309.patch","merged_at":"2021-05-03T16:01:17Z"},"body":"There were a few issues with conda releases (they've been failing for a while now).\r\nTo fix this I had to:\r\n- add the --single-version-externally-managed tag to the build stage (suggestion from [here](https:\/\/stackoverflow.com\/a\/64825075))\r\n- set the python version of the conda build stage to 3.8 since 3.9 isn't supported\r\n- sync the evrsion requirement of `huggingface_hub`\r\n\r\nWith these changes I'm working on uploading all missing versions until 1.6.2 to conda\r\n\r\nEDIT: I managed to build and upload all missing versions until 1.6.2 to conda :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2309\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2309\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2308","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2308\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2308\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2308\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2308","id":874559846,"node_id":"MDU6SXNzdWU4NzQ1NTk4NDY=","number":2308,"title":"Add COCO evaluation 
metrics","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-05-03T13:08:05Z","updated_at":"2021-06-04T07:11:27Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm currently working on adding Facebook AI's DETR model (end-to-end object detection with Transformers) to HuggingFace Transformers. The model is working fine, but regarding evaluation, I'm currently relying on external `CocoEvaluator` and `PanopticEvaluator` objects which are defined in the original repository ([here](https:\/\/github.com\/facebookresearch\/detr\/blob\/a54b77800eb8e64e3ad0d8237789fcbf2f8350c5\/datasets\/coco_eval.py#L22) and [here](https:\/\/github.com\/facebookresearch\/detr\/blob\/a54b77800eb8e64e3ad0d8237789fcbf2f8350c5\/datasets\/panoptic_eval.py#L13) respectively). \r\n\r\nRunning these in a notebook gives you nice summaries like this:\r\n![image](https:\/\/user-images.githubusercontent.com\/48327001\/116878842-326f0680-ac20-11eb-9061-d6da02193694.png)\r\n\r\nIt would be great if we could import these metrics from the Datasets library, something like this:\r\n\r\n```\r\nimport datasets\r\n\r\nmetric = datasets.load_metric('coco')\r\n\r\nfor model_input, gold_references in evaluation_dataset:\r\n model_predictions = model(model_inputs)\r\n metric.add_batch(predictions=model_predictions, references=gold_references)\r\n\r\nfinal_score = metric.compute()\r\n```\r\n\r\nI think this would be great for object detection and semantic\/panoptic segmentation in general, not just for DETR. Reproducing results of object detection papers would be way easier.\r\n\r\nHowever, object detection and panoptic segmentation evaluation is a bit more complex than accuracy (it's more like a summary of metrics at different thresholds rather than a single one). 
I'm not sure how to proceed here, but happy to help making this possible.\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2308\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2308\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2302","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2302\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2302\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2302\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2302","id":873961435,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI4NjIzMDQ3","number":2302,"title":"Add SubjQA dataset","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-05-02T14:51:20Z","updated_at":"2021-05-10T09:21:19Z","closed_at":"2021-05-10T09:21:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2302","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2302","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2302.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2302.patch","merged_at":"2021-05-10T09:21:19Z"},"body":"Hello datasetters \ud83d\ude42!\r\n\r\nHere's an interesting dataset about extractive question-answering on _subjective_ product \/ restaurant reviews. It's quite challenging for models fine-tuned on SQuAD and provides a nice example of domain adaptation (i.e. fine-tuning a SQuAD model on this domain gives better performance).\r\n\r\nI found a bug in the start\/end indices that I've proposed a fix for here: https:\/\/github.com\/megagonlabs\/SubjQA\/pull\/2\r\n\r\nUnfortunately, the dataset creators are unresponsive, so for now I am using my fork as the source. 
Will update the URL if\/when the creators respond.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2302\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2302\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2301","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2301\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2301\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2301\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2301","id":873941266,"node_id":"MDU6SXNzdWU4NzM5NDEyNjY=","number":2301,"title":"Unable to setup dev env on Windows","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-02T13:20:42Z","updated_at":"2021-05-03T15:18:01Z","closed_at":"2021-05-03T15:17:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\n\r\nI tried installing the `\".[dev]\"` version on Windows 10 after cloning.\r\n\r\nHere is the error I'm facing:\r\n\r\n```bat\r\n(env) C:\\testing\\datasets>pip install -e \".[dev]\"\r\nObtaining file:\/\/\/C:\/testing\/datasets\r\nRequirement already satisfied: numpy>=1.17 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.19.5)\r\nCollecting pyarrow>=0.17.1\r\n Using cached pyarrow-4.0.0-cp37-cp37m-win_amd64.whl (13.3 MB)\r\nRequirement already satisfied: dill in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (0.3.1.1)\r\nCollecting pandas\r\n Using cached pandas-1.2.4-cp37-cp37m-win_amd64.whl (9.1 MB)\r\nRequirement already satisfied: requests>=2.19.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (2.25.1)\r\nRequirement already satisfied: tqdm<4.50.0,>=4.27 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (4.49.0)\r\nRequirement already satisfied: xxhash in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (2.0.2)\r\nCollecting multiprocess\r\n Using cached multiprocess-0.70.11.1-py37-none-any.whl (108 
kB)\r\nRequirement already satisfied: fsspec in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (2021.4.0)\r\nCollecting huggingface_hub<0.1.0\r\n Using cached huggingface_hub-0.0.8-py3-none-any.whl (34 kB)\r\nRequirement already satisfied: importlib_metadata in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (4.0.1)\r\nRequirement already satisfied: absl-py in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (0.12.0)\r\nRequirement already satisfied: pytest in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (6.2.3)\r\nCollecting pytest-xdist\r\n Using cached pytest_xdist-2.2.1-py3-none-any.whl (37 kB)\r\nCollecting apache-beam>=2.24.0\r\n Using cached apache_beam-2.29.0-cp37-cp37m-win_amd64.whl (3.7 MB)\r\nCollecting elasticsearch\r\n Using cached elasticsearch-7.12.1-py2.py3-none-any.whl (339 kB)\r\nRequirement already satisfied: boto3==1.16.43 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.16.43)\r\nRequirement already satisfied: botocore==1.19.43 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.19.43)\r\nCollecting moto[s3]==1.3.16\r\n Using cached moto-1.3.16-py2.py3-none-any.whl (879 kB)\r\nCollecting rarfile>=4.0\r\n Using cached rarfile-4.0-py3-none-any.whl (28 kB)\r\nCollecting tensorflow>=2.3\r\n Using cached tensorflow-2.4.1-cp37-cp37m-win_amd64.whl (370.7 MB)\r\nRequirement already satisfied: torch in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.8.1)\r\nRequirement already satisfied: transformers in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (4.5.1)\r\nCollecting bs4\r\n Using cached bs4-0.0.1-py3-none-any.whl\r\nCollecting conllu\r\n Using cached conllu-4.4-py2.py3-none-any.whl (15 kB)\r\nCollecting langdetect\r\n Using cached langdetect-1.0.8-py3-none-any.whl\r\nCollecting lxml\r\n Using cached lxml-4.6.3-cp37-cp37m-win_amd64.whl (3.5 MB)\r\nCollecting mwparserfromhell\r\n Using cached mwparserfromhell-0.6-cp37-cp37m-win_amd64.whl (101 kB)\r\nCollecting nltk\r\n Using cached nltk-3.6.2-py3-none-any.whl (1.5 MB)\r\nCollecting openpyxl\r\n Using cached openpyxl-3.0.7-py2.py3-none-any.whl (243 kB)\r\nCollecting py7zr\r\n Using cached py7zr-0.15.2-py3-none-any.whl (66 kB)\r\nCollecting tldextract\r\n Using cached tldextract-3.1.0-py2.py3-none-any.whl (87 kB)\r\nCollecting zstandard\r\n Using cached zstandard-0.15.2-cp37-cp37m-win_amd64.whl (582 kB)\r\nCollecting bert_score>=0.3.6\r\n Using cached bert_score-0.3.9-py3-none-any.whl (59 kB)\r\nCollecting rouge_score\r\n Using cached rouge_score-0.0.4-py2.py3-none-any.whl (22 kB)\r\nCollecting sacrebleu\r\n Using cached sacrebleu-1.5.1-py3-none-any.whl (54 kB)\r\nRequirement already satisfied: scipy in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.6.3)\r\nCollecting seqeval\r\n Using cached seqeval-1.2.2-py3-none-any.whl\r\nCollecting sklearn\r\n Using cached sklearn-0.0-py2.py3-none-any.whl\r\nCollecting jiwer\r\n Using cached jiwer-2.2.0-py3-none-any.whl (13 kB)\r\nRequirement already satisfied: toml>=0.10.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (0.10.2)\r\nRequirement already satisfied: requests_file>=1.5.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.5.1)\r\nRequirement 
already satisfied: texttable>=1.6.3 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.6.3)\r\nRequirement already satisfied: s3fs>=0.4.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (0.4.2)\r\nRequirement already satisfied: Werkzeug>=1.0.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.0.1)\r\nCollecting black\r\n Using cached black-21.4b2-py3-none-any.whl (130 kB)\r\nCollecting isort\r\n Using cached isort-5.8.0-py3-none-any.whl (103 kB)\r\nCollecting flake8==3.7.9\r\n Using cached flake8-3.7.9-py2.py3-none-any.whl (69 kB)\r\nRequirement already satisfied: jmespath<1.0.0,>=0.7.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from boto3==1.16.43->datasets==1.5.0.dev0) (0.10.0)\r\nRequirement already satisfied: s3transfer<0.4.0,>=0.3.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from boto3==1.16.43->datasets==1.5.0.dev0) (0.3.7)\r\nRequirement already satisfied: urllib3<1.27,>=1.25.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from botocore==1.19.43->datasets==1.5.0.dev0) (1.26.4)\r\nRequirement already satisfied: python-dateutil<3.0.0,>=2.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from botocore==1.19.43->datasets==1.5.0.dev0) (2.8.1)\r\nCollecting entrypoints<0.4.0,>=0.3.0\r\n Using cached entrypoints-0.3-py2.py3-none-any.whl (11 kB)\r\nCollecting pyflakes<2.2.0,>=2.1.0\r\n Using cached pyflakes-2.1.1-py2.py3-none-any.whl (59 kB)\r\nCollecting pycodestyle<2.6.0,>=2.5.0\r\n Using cached pycodestyle-2.5.0-py2.py3-none-any.whl (51 kB)\r\nCollecting mccabe<0.7.0,>=0.6.0\r\n Using cached mccabe-0.6.1-py2.py3-none-any.whl (8.6 kB)\r\nRequirement already satisfied: jsondiff>=1.1.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.3.0)\r\nRequirement already satisfied: pytz in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (2021.1)\r\nRequirement already satisfied: mock in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (4.0.3)\r\nRequirement already satisfied: MarkupSafe<2.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.1.1)\r\nRequirement already satisfied: python-jose[cryptography]<4.0.0,>=3.1.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (3.2.0)\r\nRequirement already satisfied: aws-xray-sdk!=0.96,>=0.93 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.8.0)\r\nRequirement already satisfied: cryptography>=2.3.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (3.4.7)\r\nRequirement already satisfied: more-itertools in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (8.7.0)\r\nRequirement already satisfied: PyYAML>=5.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (5.4.1)\r\nRequirement already satisfied: boto>=2.36.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.49.0)\r\nRequirement already satisfied: idna<3,>=2.5 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from 
moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.10)\r\nRequirement already satisfied: sshpubkeys>=3.1.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (3.3.1)\r\nRequirement already satisfied: responses>=0.9.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.13.3)\r\nRequirement already satisfied: xmltodict in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.12.0)\r\nRequirement already satisfied: setuptools in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (52.0.0.post20210125)\r\nRequirement already satisfied: Jinja2>=2.10.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.11.3)\r\nRequirement already satisfied: zipp in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (3.4.1)\r\nRequirement already satisfied: six>1.9 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.15.0)\r\nRequirement already satisfied: ecdsa<0.15 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.14.1)\r\nRequirement already satisfied: docker>=2.5.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (5.0.0)\r\nRequirement already satisfied: cfn-lint>=0.4.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.49.0)\r\nRequirement already satisfied: grpcio<2,>=1.29.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from apache-beam>=2.24.0->datasets==1.5.0.dev0) (1.32.0)\r\nCollecting hdfs<3.0.0,>=2.1.0\r\n Using cached hdfs-2.6.0-py3-none-any.whl (33 kB)\r\nCollecting pyarrow>=0.17.1\r\n Using cached pyarrow-3.0.0-cp37-cp37m-win_amd64.whl (12.6 MB)\r\nCollecting fastavro<2,>=0.21.4\r\n Using cached fastavro-1.4.0-cp37-cp37m-win_amd64.whl (394 kB)\r\nRequirement already satisfied: httplib2<0.18.0,>=0.8 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from apache-beam>=2.24.0->datasets==1.5.0.dev0) (0.17.4)\r\nCollecting pymongo<4.0.0,>=3.8.0\r\n Using cached pymongo-3.11.3-cp37-cp37m-win_amd64.whl (382 kB)\r\nCollecting crcmod<2.0,>=1.7\r\n Using cached crcmod-1.7-py3-none-any.whl\r\nCollecting avro-python3!=1.9.2,<1.10.0,>=1.8.1\r\n Using cached avro_python3-1.9.2.1-py3-none-any.whl\r\nRequirement already satisfied: typing-extensions<3.8.0,>=3.7.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from apache-beam>=2.24.0->datasets==1.5.0.dev0) (3.7.4.3)\r\nRequirement already satisfied: future<1.0.0,>=0.18.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from apache-beam>=2.24.0->datasets==1.5.0.dev0) (0.18.2)\r\nCollecting oauth2client<5,>=2.0.1\r\n Using cached oauth2client-4.1.3-py2.py3-none-any.whl (98 kB)\r\nCollecting pydot<2,>=1.2.0\r\n Using cached pydot-1.4.2-py2.py3-none-any.whl (21 kB)\r\nRequirement already satisfied: protobuf<4,>=3.12.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from apache-beam>=2.24.0->datasets==1.5.0.dev0) (3.15.8)\r\nRequirement already satisfied: wrapt in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from aws-xray-sdk!=0.96,>=0.93->moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.12.1)\r\nCollecting matplotlib\r\n Using cached 
matplotlib-3.4.1-cp37-cp37m-win_amd64.whl (7.1 MB)\r\nRequirement already satisfied: junit-xml~=1.9 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.9)\r\nRequirement already satisfied: jsonpatch in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.32)\r\nRequirement already satisfied: jsonschema~=3.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (3.2.0)\r\nRequirement already satisfied: networkx~=2.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.5.1)\r\nRequirement already satisfied: aws-sam-translator>=1.35.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.35.0)\r\nRequirement already satisfied: cffi>=1.12 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cryptography>=2.3.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.14.5)\r\nRequirement already satisfied: pycparser in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cffi>=1.12->cryptography>=2.3.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.20)\r\nRequirement already satisfied: pywin32==227 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from docker>=2.5.1->moto[s3]==1.3.16->datasets==1.5.0.dev0) (227)\r\nRequirement already satisfied: websocket-client>=0.32.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from docker>=2.5.1->moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.58.0)\r\nRequirement already satisfied: docopt in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from hdfs<3.0.0,>=2.1.0->apache-beam>=2.24.0->datasets==1.5.0.dev0) (0.6.2)\r\nRequirement already satisfied: filelock in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from huggingface_hub<0.1.0->datasets==1.5.0.dev0) (3.0.12)\r\nRequirement already satisfied: pyrsistent>=0.14.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from jsonschema~=3.0->cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.17.3)\r\nRequirement already satisfied: attrs>=17.4.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from jsonschema~=3.0->cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (20.3.0)\r\nRequirement already satisfied: decorator<5,>=4.3 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from networkx~=2.4->cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (4.4.2)\r\nRequirement already satisfied: rsa>=3.1.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from oauth2client<5,>=2.0.1->apache-beam>=2.24.0->datasets==1.5.0.dev0) (4.7.2)\r\nRequirement already satisfied: pyasn1-modules>=0.0.5 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from oauth2client<5,>=2.0.1->apache-beam>=2.24.0->datasets==1.5.0.dev0) (0.2.8)\r\nRequirement already satisfied: pyasn1>=0.1.7 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from oauth2client<5,>=2.0.1->apache-beam>=2.24.0->datasets==1.5.0.dev0) (0.4.8)\r\nRequirement already satisfied: pyparsing>=2.1.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pydot<2,>=1.2.0->apache-beam>=2.24.0->datasets==1.5.0.dev0) (2.4.7)\r\nRequirement already satisfied: certifi>=2017.4.17 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from 
requests>=2.19.0->datasets==1.5.0.dev0) (2020.12.5)\r\nRequirement already satisfied: chardet<5,>=3.0.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from requests>=2.19.0->datasets==1.5.0.dev0) (4.0.0)\r\nCollecting keras-preprocessing~=1.1.2\r\n Using cached Keras_Preprocessing-1.1.2-py2.py3-none-any.whl (42 kB)\r\nRequirement already satisfied: termcolor~=1.1.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorflow>=2.3->datasets==1.5.0.dev0) (1.1.0)\r\nRequirement already satisfied: tensorboard~=2.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorflow>=2.3->datasets==1.5.0.dev0) (2.5.0)\r\nRequirement already satisfied: wheel~=0.35 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorflow>=2.3->datasets==1.5.0.dev0) (0.36.2)\r\nCollecting opt-einsum~=3.3.0\r\n Using cached opt_einsum-3.3.0-py3-none-any.whl (65 kB)\r\nCollecting gast==0.3.3\r\n Using cached gast-0.3.3-py2.py3-none-any.whl (9.7 kB)\r\nCollecting google-pasta~=0.2\r\n Using cached google_pasta-0.2.0-py3-none-any.whl (57 kB)\r\nRequirement already satisfied: tensorflow-estimator<2.5.0,>=2.4.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorflow>=2.3->datasets==1.5.0.dev0) (2.4.0)\r\nCollecting astunparse~=1.6.3\r\n Using cached astunparse-1.6.3-py2.py3-none-any.whl (12 kB)\r\nCollecting flatbuffers~=1.12.0\r\n Using cached flatbuffers-1.12-py2.py3-none-any.whl (15 kB)\r\nCollecting h5py~=2.10.0\r\n Using cached h5py-2.10.0-cp37-cp37m-win_amd64.whl (2.5 MB)\r\nRequirement already satisfied: markdown>=2.6.8 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (3.3.4)\r\nRequirement already satisfied: tensorboard-plugin-wit>=1.6.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (1.8.0)\r\nRequirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (0.4.4)\r\nRequirement already satisfied: tensorboard-data-server<0.7.0,>=0.6.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (0.6.0)\r\nRequirement already satisfied: google-auth<2,>=1.6.3 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (1.30.0)\r\nRequirement already satisfied: cachetools<5.0,>=2.0.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from google-auth<2,>=1.6.3->tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (4.2.2)\r\nRequirement already satisfied: requests-oauthlib>=0.7.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (1.3.0)\r\nRequirement already satisfied: oauthlib>=3.0.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (3.1.0)\r\nRequirement already satisfied: regex!=2019.12.17 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from transformers->datasets==1.5.0.dev0) (2021.4.4)\r\nRequirement already satisfied: tokenizers<0.11,>=0.10.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from transformers->datasets==1.5.0.dev0) 
(0.10.2)\r\nRequirement already satisfied: sacremoses in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from transformers->datasets==1.5.0.dev0) (0.0.45)\r\nRequirement already satisfied: packaging in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from transformers->datasets==1.5.0.dev0) (20.9)\r\nCollecting pathspec<1,>=0.8.1\r\n Using cached pathspec-0.8.1-py2.py3-none-any.whl (28 kB)\r\nRequirement already satisfied: click>=7.1.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from black->datasets==1.5.0.dev0) (7.1.2)\r\nCollecting appdirs\r\n Using cached appdirs-1.4.4-py2.py3-none-any.whl (9.6 kB)\r\nCollecting mypy-extensions>=0.4.3\r\n Using cached mypy_extensions-0.4.3-py2.py3-none-any.whl (4.5 kB)\r\nRequirement already satisfied: typed-ast>=1.4.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from black->datasets==1.5.0.dev0) (1.4.3)\r\nCollecting beautifulsoup4\r\n Using cached beautifulsoup4-4.9.3-py3-none-any.whl (115 kB)\r\nRequirement already satisfied: soupsieve>1.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from beautifulsoup4->bs4->datasets==1.5.0.dev0) (2.2.1)\r\nCollecting python-Levenshtein\r\n Using cached python-Levenshtein-0.12.2.tar.gz (50 kB)\r\nRequirement already satisfied: jsonpointer>=1.9 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from jsonpatch->cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.1)\r\nRequirement already satisfied: pillow>=6.2.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from matplotlib->bert_score>=0.3.6->datasets==1.5.0.dev0) (8.2.0)\r\nRequirement already satisfied: cycler>=0.10 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from matplotlib->bert_score>=0.3.6->datasets==1.5.0.dev0) (0.10.0)\r\nRequirement already satisfied: kiwisolver>=1.0.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from matplotlib->bert_score>=0.3.6->datasets==1.5.0.dev0) (1.3.1)\r\nCollecting multiprocess\r\n Using cached multiprocess-0.70.11-py3-none-any.whl (98 kB)\r\n Using cached multiprocess-0.70.10.zip (2.4 MB)\r\n Using cached multiprocess-0.70.9-py3-none-any.whl\r\nRequirement already satisfied: joblib in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from nltk->datasets==1.5.0.dev0) (1.0.1)\r\nCollecting et-xmlfile\r\n Using cached et_xmlfile-1.1.0-py3-none-any.whl (4.7 kB)\r\nRequirement already satisfied: pyzstd<0.15.0,>=0.14.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from py7zr->datasets==1.5.0.dev0) (0.14.4)\r\nCollecting pyppmd<0.13.0,>=0.12.1\r\n Using cached pyppmd-0.12.1-cp37-cp37m-win_amd64.whl (32 kB)\r\nCollecting pycryptodome>=3.6.6\r\n Using cached pycryptodome-3.10.1-cp35-abi3-win_amd64.whl (1.6 MB)\r\nCollecting bcj-cffi<0.6.0,>=0.5.1\r\n Using cached bcj_cffi-0.5.1-cp37-cp37m-win_amd64.whl (21 kB)\r\nCollecting multivolumefile<0.3.0,>=0.2.0\r\n Using cached multivolumefile-0.2.3-py3-none-any.whl (17 kB)\r\nRequirement already satisfied: iniconfig in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pytest->datasets==1.5.0.dev0) (1.1.1)\r\nRequirement already satisfied: py>=1.8.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pytest->datasets==1.5.0.dev0) (1.10.0)\r\nRequirement already satisfied: pluggy<1.0.0a1,>=0.12 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pytest->datasets==1.5.0.dev0) (0.13.1)\r\nRequirement already satisfied: atomicwrites>=1.0 in 
c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pytest->datasets==1.5.0.dev0) (1.4.0)\r\nRequirement already satisfied: colorama in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pytest->datasets==1.5.0.dev0) (0.4.4)\r\nCollecting pytest-forked\r\n Using cached pytest_forked-1.3.0-py2.py3-none-any.whl (4.7 kB)\r\nCollecting execnet>=1.1\r\n Using cached execnet-1.8.0-py2.py3-none-any.whl (39 kB)\r\nRequirement already satisfied: apipkg>=1.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from execnet>=1.1->pytest-xdist->datasets==1.5.0.dev0) (1.5)\r\nCollecting portalocker==2.0.0\r\n Using cached portalocker-2.0.0-py2.py3-none-any.whl (11 kB)\r\nRequirement already satisfied: scikit-learn>=0.21.3 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from seqeval->datasets==1.5.0.dev0) (0.24.2)\r\nRequirement already satisfied: threadpoolctl>=2.0.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from scikit-learn>=0.21.3->seqeval->datasets==1.5.0.dev0) (2.1.0)\r\nBuilding wheels for collected packages: python-Levenshtein\r\n Building wheel for python-Levenshtein (setup.py) ... error\r\n ERROR: Command errored out with exit status 1:\r\n command: 'C:\\ProgramData\\Anaconda3\\envs\\env\\python.exe' -u -c 'import sys, setuptools, tokenize; sys.argv[0] = '\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"'; __file__='\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"';f=getattr(tokenize, '\"'\"'open'\"'\"', open)(__file__);code=f.read().replace('\"'\"'\\r\\n'\"'\"', '\"'\"'\\n'\"'\"');f.close();exec(compile(code, __file__, '\"'\"'exec'\"'\"'))' bdist_wheel -d 'C:\\Users\\VKC~1\\AppData\\Local\\Temp\\pip-wheel-8jh7fm18'\r\n cwd: C:\\Users\\VKC~1\\AppData\\Local\\Temp\\pip-install-ynt_dbm4\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\r\n Complete output (27 lines):\r\n running bdist_wheel\r\n running build\r\n running build_py\r\n creating build\r\n creating build\\lib.win-amd64-3.7\r\n creating build\\lib.win-amd64-3.7\\Levenshtein\r\n copying Levenshtein\\StringMatcher.py -> build\\lib.win-amd64-3.7\\Levenshtein\r\n copying Levenshtein\\__init__.py -> build\\lib.win-amd64-3.7\\Levenshtein\r\n running egg_info\r\n writing python_Levenshtein.egg-info\\PKG-INFO\r\n writing dependency_links to python_Levenshtein.egg-info\\dependency_links.txt\r\n writing entry points to python_Levenshtein.egg-info\\entry_points.txt\r\n writing namespace_packages to python_Levenshtein.egg-info\\namespace_packages.txt\r\n writing requirements to python_Levenshtein.egg-info\\requires.txt\r\n writing top-level names to python_Levenshtein.egg-info\\top_level.txt\r\n reading manifest file 'python_Levenshtein.egg-info\\SOURCES.txt'\r\n reading manifest template 'MANIFEST.in'\r\n warning: no previously-included files matching '*pyc' found anywhere in distribution\r\n warning: no previously-included files matching '*so' found anywhere in distribution\r\n warning: no previously-included files matching '.project' found anywhere in distribution\r\n warning: no previously-included files matching '.pydevproject' found anywhere in distribution\r\n writing manifest file 'python_Levenshtein.egg-info\\SOURCES.txt'\r\n copying Levenshtein\\_levenshtein.c -> build\\lib.win-amd64-3.7\\Levenshtein\r\n copying Levenshtein\\_levenshtein.h -> 
build\\lib.win-amd64-3.7\\Levenshtein\r\n running build_ext\r\n building 'Levenshtein._levenshtein' extension\r\n error: Microsoft Visual C++ 14.0 or greater is required. Get it with \"Microsoft C++ Build Tools\": https:\/\/visualstudio.microsoft.com\/visual-cpp-build-tools\/\r\n ----------------------------------------\r\n ERROR: Failed building wheel for python-Levenshtein\r\n Running setup.py clean for python-Levenshtein\r\nFailed to build python-Levenshtein\r\nInstalling collected packages: python-Levenshtein, pytest-forked, pyppmd, pymongo, pyflakes, pydot, pycryptodome, pycodestyle, pyarrow, portalocker, pathspec, pandas, opt-einsum, oauth2client, nltk, mypy-extensions, multivolumefile, multiprocess, moto, mccabe, matplotlib, keras-preprocessing, huggingface-hub, hdfs, h5py, google-pasta, gast, flatbuffers, fastavro, execnet, et-xmlfile, entrypoints, crcmod, beautifulsoup4, bcj-cffi, avro-python3, astunparse, appdirs, zstandard, tldextract, tensorflow, sklearn, seqeval, sacrebleu, rouge-score, rarfile, pytest-xdist, py7zr, openpyxl, mwparserfromhell, lxml, langdetect, jiwer, isort, flake8, elasticsearch, datasets, conllu, bs4, black, bert-score, apache-beam\r\n Running setup.py install for python-Levenshtein ... error\r\n ERROR: Command errored out with exit status 1:\r\n command: 'C:\\ProgramData\\Anaconda3\\envs\\env\\python.exe' -u -c 'import sys, setuptools, tokenize; sys.argv[0] = '\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"'; __file__='\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"';f=getattr(tokenize, '\"'\"'open'\"'\"', open)(__file__);code=f.read().replace('\"'\"'\\r\\n'\"'\"', '\"'\"'\\n'\"'\"');f.close();exec(compile(code, __file__, '\"'\"'exec'\"'\"'))' install --record 'C:\\Users\\VKC~1\\AppData\\Local\\Temp\\pip-record-v7l7zitb\\install-record.txt' --single-version-externally-managed --compile --install-headers 'C:\\ProgramData\\Anaconda3\\envs\\env\\Include\\python-Levenshtein'\r\n cwd: C:\\Users\\VKC~1\\AppData\\Local\\Temp\\pip-install-ynt_dbm4\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\r\n Complete output (27 lines):\r\n running install\r\n running build\r\n running build_py\r\n creating build\r\n creating build\\lib.win-amd64-3.7\r\n creating build\\lib.win-amd64-3.7\\Levenshtein\r\n copying Levenshtein\\StringMatcher.py -> build\\lib.win-amd64-3.7\\Levenshtein\r\n copying Levenshtein\\__init__.py -> build\\lib.win-amd64-3.7\\Levenshtein\r\n running egg_info\r\n writing python_Levenshtein.egg-info\\PKG-INFO\r\n writing dependency_links to python_Levenshtein.egg-info\\dependency_links.txt\r\n writing entry points to python_Levenshtein.egg-info\\entry_points.txt\r\n writing namespace_packages to python_Levenshtein.egg-info\\namespace_packages.txt\r\n writing requirements to python_Levenshtein.egg-info\\requires.txt\r\n writing top-level names to python_Levenshtein.egg-info\\top_level.txt\r\n reading manifest file 'python_Levenshtein.egg-info\\SOURCES.txt'\r\n reading manifest template 'MANIFEST.in'\r\n warning: no previously-included files matching '*pyc' found anywhere in distribution\r\n warning: no previously-included files matching '*so' found anywhere in distribution\r\n warning: no previously-included files matching '.project' found anywhere in distribution\r\n warning: no previously-included files matching '.pydevproject' found anywhere in 
distribution\r\n writing manifest file 'python_Levenshtein.egg-info\\SOURCES.txt'\r\n copying Levenshtein\\_levenshtein.c -> build\\lib.win-amd64-3.7\\Levenshtein\r\n copying Levenshtein\\_levenshtein.h -> build\\lib.win-amd64-3.7\\Levenshtein\r\n running build_ext\r\n building 'Levenshtein._levenshtein' extension\r\n error: Microsoft Visual C++ 14.0 or greater is required. Get it with \"Microsoft C++ Build Tools\": https:\/\/visualstudio.microsoft.com\/visual-cpp-build-tools\/\r\n ----------------------------------------\r\nERROR: Command errored out with exit status 1: 'C:\\ProgramData\\Anaconda3\\envs\\env\\python.exe' -u -c 'import sys, setuptools, tokenize; sys.argv[0] = '\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"'; __file__='\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"';f=getattr(tokenize, '\"'\"'open'\"'\"', open)(__file__);code=f.read().replace('\"'\"'\\r\\n'\"'\"', '\"'\"'\\n'\"'\"');f.close();exec(compile(code, __file__, '\"'\"'exec'\"'\"'))' install --record 'C:\\Users\\VKC~1\\AppData\\Local\\Temp\\pip-record-v7l7zitb\\install-record.txt' --single-version-externally-managed --compile --install-headers 'C:\\ProgramData\\Anaconda3\\envs\\env\\Include\\python-Levenshtein' Check the logs for full command output.\r\n```\r\n\r\nHere are conda and python versions:\r\n\r\n```bat\r\n(env) C:\\testing\\datasets>conda --version\r\nconda 4.9.2\r\n\r\n(env) C:\\testing\\datasets>python --version\r\nPython 3.7.10\r\n```\r\n\r\nPlease help me out. Thanks.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2301\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2301\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2300","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2300\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2300\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2300\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2300","id":873928169,"node_id":"MDU6SXNzdWU4NzM5MjgxNjk=","number":2300,"title":"Add 
VoxPopuli","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-02T12:17:40Z","updated_at":"2021-05-13T10:31:52Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Voxpopuli\r\n- **Description:** VoxPopuli is raw data is collected from 2009-2020 European Parliament event recordings\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2101.00390\r\n- **Data:** https:\/\/github.com\/facebookresearch\/voxpopuli\r\n- **Motivation:** biggest unlabeled speech dataset\r\n\r\n**Note**: Since the dataset is so huge, we should only add the config `10k` in the beginning.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2300\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2300\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2299","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2299\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2299\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2299\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2299","id":873914717,"node_id":"MDU6SXNzdWU4NzM5MTQ3MTc=","number":2299,"title":"My 
iPhone","user":{"login":"Jasonbuchanan1983","id":82856229,"node_id":"MDQ6VXNlcjgyODU2MjI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/82856229?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983","html_url":"https:\/\/github.com\/Jasonbuchanan1983","followers_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/followers","following_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/orgs","repos_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/repos","events_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-02T11:11:11Z","updated_at":"2021-07-23T09:24:16Z","closed_at":"2021-05-03T08:17:38Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2299\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2299\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2298","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2298\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2298\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2298\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2298","id":873771942,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI4NDk2NjM2","number":2298,"title":"Mapping in the distributed 
setting","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-01T21:23:05Z","updated_at":"2021-05-03T13:54:53Z","closed_at":"2021-05-03T13:54:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2298","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2298","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2298.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2298.patch","merged_at":"2021-05-03T13:54:53Z"},"body":"The barrier trick for distributed mapping as discussed on Thursday with @lhoestq","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2298\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2298\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2296","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2296\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2296\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2296\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2296","id":872974907,"node_id":"MDU6SXNzdWU4NzI5NzQ5MDc=","number":2296,"title":"1","user":{"login":"zinnyi","id":82880142,"node_id":"MDQ6VXNlcjgyODgwMTQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/82880142?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zinnyi","html_url":"https:\/\/github.com\/zinnyi","followers_url":"https:\/\/api.github.com\/users\/zinnyi\/followers","following_url":"https:\/\/api.github.com\/users\/zinnyi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zinnyi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zinnyi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zinnyi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zinnyi\/orgs","repos_url":"https:\/\/api.github.com\/users\/zinnyi\/repos","events_url":"https:\/\/api.github.com\/users\/zinnyi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zinnyi\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-30T17:53:49Z","updated_at":"2021-05-03T08:17:31Z","closed_at":"2021-05-03T08:17:31Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2296\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2296\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2295","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2295\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2295\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2295\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2295","id":872902867,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI3NzY0NDk3","number":2295,"title":"Create 
ExtractManager","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":2,"created_at":"2021-04-30T17:13:34Z","updated_at":"2021-07-12T14:12:03Z","closed_at":"2021-07-08T08:11:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2295","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2295","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2295.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2295.patch","merged_at":"2021-07-08T08:11:49Z"},"body":"Perform refactoring to decouple extract functionality.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2295\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2295\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2294","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2294\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2294\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2294\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2294","id":872136075,"node_id":"MDU6SXNzdWU4NzIxMzYwNzU=","number":2294,"title":"Slow #0 when using map to 
tokenize.","user":{"login":"VerdureChen","id":31714566,"node_id":"MDQ6VXNlcjMxNzE0NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31714566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VerdureChen","html_url":"https:\/\/github.com\/VerdureChen","followers_url":"https:\/\/api.github.com\/users\/VerdureChen\/followers","following_url":"https:\/\/api.github.com\/users\/VerdureChen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VerdureChen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VerdureChen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VerdureChen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VerdureChen\/orgs","repos_url":"https:\/\/api.github.com\/users\/VerdureChen\/repos","events_url":"https:\/\/api.github.com\/users\/VerdureChen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VerdureChen\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-30T08:00:33Z","updated_at":"2021-05-04T11:00:11Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, _datasets_ is really amazing! I am following [run_mlm_no_trainer.py](url) to pre-train BERT, and it uses `tokenized_datasets = raw_datasets.map(\r\n tokenize_function,\r\n batched=True,\r\n num_proc=args.preprocessing_num_workers,\r\n remove_columns=column_names,\r\n load_from_cache_file=not args.overwrite_cache,\r\n )` to tokenize by multiprocessing. However, I have found that when `num_proc`>1\uff0cthe process _#0_ is much slower than others.\r\nIt looks like this:\r\n![image](https:\/\/user-images.githubusercontent.com\/31714566\/116665555-81246280-a9cc-11eb-8a37-6e608ab310d0.png)\r\nIt takes more than 12 hours for #0, while others just about half an hour. 
Could anyone tell me it is normal or not, and is there any methods to speed up it?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2294\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2294\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2293","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2293\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2293\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2293\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2293","id":872079385,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI3MDQzNzQ3","number":2293,"title":"imdb dataset from Don't Stop Pretraining Paper","user":{"login":"BobbyManion","id":52530809,"node_id":"MDQ6VXNlcjUyNTMwODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52530809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BobbyManion","html_url":"https:\/\/github.com\/BobbyManion","followers_url":"https:\/\/api.github.com\/users\/BobbyManion\/followers","following_url":"https:\/\/api.github.com\/users\/BobbyManion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BobbyManion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BobbyManion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BobbyManion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BobbyManion\/orgs","repos_url":"https:\/\/api.github.com\/users\/BobbyManion\/repos","events_url":"https:\/\/api.github.com\/users\/BobbyManion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BobbyManion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-30T06:40:48Z","updated_at":"2021-04-30T06:54:25Z","closed_at":"2021-04-30T06:54:25Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2293","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2293","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2293.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2293.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2293\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2293\/timeline","performed_via_github_app":null} 
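One way to investigate the behaviour reported in issue #2294 above (process #0 far slower than the others during a multiprocessing `map`) is to time the shards separately: `map(..., num_proc=N)` works on N contiguous shards, so tokenizing shard 0 in isolation shows whether that slice of the data is genuinely heavier. A rough diagnostic sketch, with the tokenizer name, file path, and column name as placeholders:

```python
# Rough diagnostic sketch for issue #2294: time each contiguous shard on its own.
import time
from datasets import load_dataset
from transformers import AutoTokenizer

num_proc = 4
tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
dataset = load_dataset("text", data_files={"train": "corpus.txt"})["train"]

def tokenize_function(examples):
    return tokenizer(examples["text"])

for index in range(num_proc):
    shard = dataset.shard(num_shards=num_proc, index=index, contiguous=True)
    start = time.time()
    shard.map(tokenize_function, batched=True, load_from_cache_file=False)
    print(f"shard {index}: {len(shard)} rows in {time.time() - start:.1f}s")
```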
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2292","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2292\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2292\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2292\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2292","id":871230183,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MjgzNTYy","number":2292,"title":"Fixed typo seperate->separate","user":{"login":"laksh9950","id":32505743,"node_id":"MDQ6VXNlcjMyNTA1NzQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32505743?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/laksh9950","html_url":"https:\/\/github.com\/laksh9950","followers_url":"https:\/\/api.github.com\/users\/laksh9950\/followers","following_url":"https:\/\/api.github.com\/users\/laksh9950\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/laksh9950\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/laksh9950\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/laksh9950\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/laksh9950\/orgs","repos_url":"https:\/\/api.github.com\/users\/laksh9950\/repos","events_url":"https:\/\/api.github.com\/users\/laksh9950\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/laksh9950\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T16:40:53Z","updated_at":"2021-04-30T13:29:18Z","closed_at":"2021-04-30T13:03:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2292","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2292","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2292.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2292.patch","merged_at":"2021-04-30T13:03:12Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2292\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2292\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2291","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2291\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2291\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2291\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2291","id":871216757,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MjcyNzE5","number":2291,"title":"Don't copy recordbatches in memory during a table 
deepcopy","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T16:26:05Z","updated_at":"2021-04-29T16:34:35Z","closed_at":"2021-04-29T16:34:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2291","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2291","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2291.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2291.patch","merged_at":"2021-04-29T16:34:33Z"},"body":"Fix issue #2276 and hopefully #2134\r\n\r\nThe recordbatches of the `IndexedTableMixin` used to speed up queries to the table were copied in memory during a table deepcopy.\r\nThis resulted in `concatenate_datasets`, `load_from_disk` and other methods to always bring the data in memory.\r\n\r\nI fixed the copy similarly to #2287 and updated the test to make sure it doesn't happen again (added a test for deepcopy + make sure that the immutable arrow objects are passed to the copied table without being copied).\r\n\r\nThe issue was not caught by our tests because the total allocated bytes value in PyArrow isn't updated when deepcopying recordbatches: the copy in memory wasn't detected. 
This behavior looks like a bug in PyArrow, I'll open a ticket on JIRA.\r\n\r\nThanks @samsontmr , @TaskManager91 and @mariosasko for the help\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2291\/reactions","total_count":2,"+1":1,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2291\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2290","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2290\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2290\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2290\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2290","id":871145817,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MjEyNTIz","number":2290,"title":"Bbaw egyptian","user":{"login":"phiwi","id":54144149,"node_id":"MDQ6VXNlcjU0MTQ0MTQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/54144149?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/phiwi","html_url":"https:\/\/github.com\/phiwi","followers_url":"https:\/\/api.github.com\/users\/phiwi\/followers","following_url":"https:\/\/api.github.com\/users\/phiwi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/phiwi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/phiwi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/phiwi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/phiwi\/orgs","repos_url":"https:\/\/api.github.com\/users\/phiwi\/repos","events_url":"https:\/\/api.github.com\/users\/phiwi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/phiwi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-04-29T15:27:58Z","updated_at":"2021-05-06T17:25:25Z","closed_at":"2021-05-06T17:25:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2290","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2290","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2290.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2290.patch","merged_at":"2021-05-06T17:25:25Z"},"body":"This is the \"hieroglyph corpus\" that I could unfortunately not contribute during the marathon. I re-extracted it again now, so that it is in the state as used in my paper (seee documentation). 
I hope it satiesfies your requirements and wish every scientist out their loads of fun deciphering a 5.000 years old language :-)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2290\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":3,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2290\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2289","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2289\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2289\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2289\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2289","id":871118573,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MTg5MDU3","number":2289,"title":"Allow collaborators to self-assign issues","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-29T15:07:06Z","updated_at":"2021-04-30T18:28:16Z","closed_at":"2021-04-30T18:28:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2289","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2289","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2289.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2289.patch","merged_at":"2021-04-30T18:28:16Z"},"body":"Allow collaborators (without write access to the repository) to self-assign issues.\r\n\r\nIn order to self-assign an issue, they have to comment it with the word: `#take` or `#self-assign`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2289\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2289\/timeline","performed_via_github_app":null} 
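The fix described in pull request #2291 above (passing the immutable Arrow objects to the copied table instead of copying them) comes down to a standard `__deepcopy__` idiom: register the immutable objects in the memo dict so `copy.deepcopy` reuses them. A toy illustration of that idea, not the actual `datasets.table` code:

```python
# Toy sketch of the idea behind PRs #2291 / #2287, not the real datasets.table code.
import copy
import pyarrow as pa

class IndexedTable:
    def __init__(self, table: pa.Table):
        self.table = table
        self.batches = table.to_batches()

    def __deepcopy__(self, memo):
        cls = self.__class__
        new = cls.__new__(cls)
        memo[id(self)] = new
        # Arrow tables and record batches are immutable, so they can be shared
        # with the copy instead of being duplicated in memory.
        memo[id(self.table)] = self.table
        for batch in self.batches:
            memo[id(batch)] = batch
        for key, value in self.__dict__.items():
            setattr(new, key, copy.deepcopy(value, memo))
        return new

t = IndexedTable(pa.table({"col": list(range(10))}))
t_copy = copy.deepcopy(t)
assert t_copy.table is t.table  # the underlying data is shared, not re-allocated
```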
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2288","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2288\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2288\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2288\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2288","id":871111235,"node_id":"MDU6SXNzdWU4NzExMTEyMzU=","number":2288,"title":"Load_dataset for local CSV files","user":{"login":"sstojanoska","id":17052700,"node_id":"MDQ6VXNlcjE3MDUyNzAw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17052700?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sstojanoska","html_url":"https:\/\/github.com\/sstojanoska","followers_url":"https:\/\/api.github.com\/users\/sstojanoska\/followers","following_url":"https:\/\/api.github.com\/users\/sstojanoska\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sstojanoska\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sstojanoska\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sstojanoska\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sstojanoska\/orgs","repos_url":"https:\/\/api.github.com\/users\/sstojanoska\/repos","events_url":"https:\/\/api.github.com\/users\/sstojanoska\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sstojanoska\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-29T15:01:10Z","updated_at":"2021-06-15T13:49:26Z","closed_at":"2021-06-15T13:49:26Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The method load_dataset fails to correctly load a dataset from csv. 
\r\n\r\nMoreover, I am working on a token-classification task ( POS tagging) , where each row in my CSV contains two columns each of them having a list of strings.\r\nrow example:\r\n```tokens | labels\r\n['I' , 'am', 'John'] | ['PRON', 'AUX', 'PROPN' ] \r\n```\r\nThe method, loads each list as a string: (i.g \"['I' , 'am', 'John']\").\r\nTo solve this issue, I copied the Datasets.Features, created Sequence types ( instead of Value) and tried to cast the features type\r\n```\r\nnew_features['tokens'] = Sequence(feature=Value(dtype='string', id=None))\r\nnew_features['labels'] = Sequence(feature=ClassLabel(num_classes=len(tag2idx), names=list(unique_tags)))\r\ndataset = dataset.cast(new_features)\r\n```\r\nbut I got the following error \r\n```\r\nArrowNotImplementedError: Unsupported cast from string to list using function cast_list\r\n```\r\nMoreover, I tried to set feature parameter in load_dataset method, to my new_features, but this fails as well.\r\nHow can this be solved ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2288\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2288\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2287","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2287\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2287\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2287\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2287","id":871063374,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MTQ0MTQ3","number":2287,"title":"Avoid copying table's record 
batches","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-29T14:15:01Z","updated_at":"2021-04-29T16:34:23Z","closed_at":"2021-04-29T16:34:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2287","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2287","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2287.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2287.patch","merged_at":null},"body":"Fixes #2276","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2287\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2287\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2286","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2286\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2286\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2286\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2286","id":871032393,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MTE5MTE2","number":2286,"title":"Fix metadata validation with config 
names","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T13:44:32Z","updated_at":"2021-04-29T14:07:29Z","closed_at":"2021-04-29T14:07:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2286","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2286","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2286.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2286.patch","merged_at":"2021-04-29T14:07:28Z"},"body":"I noticed in https:\/\/github.com\/huggingface\/datasets\/pull\/2280 that the metadata validator doesn't parse the tags in the readme properly when then contain the tags per config.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2286\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2286\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2285","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2285\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2285\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2285\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2285","id":871005236,"node_id":"MDU6SXNzdWU4NzEwMDUyMzY=","number":2285,"title":"Help understanding how to build a dataset for language modeling as with the old 
TextDataset","user":{"login":"danieldiezmallo","id":46021411,"node_id":"MDQ6VXNlcjQ2MDIxNDEx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46021411?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danieldiezmallo","html_url":"https:\/\/github.com\/danieldiezmallo","followers_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/followers","following_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/orgs","repos_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/repos","events_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-29T13:16:45Z","updated_at":"2021-05-19T07:22:45Z","closed_at":"2021-05-19T07:22:39Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello,\r\n\r\nI am trying to load a custom dataset that I will then use for language modeling. The dataset consists of a text file that has a whole document in each line, meaning that each line overpasses the normal 512 tokens limit of most tokenizers.\r\n\r\nI would like to understand what is the process to build a text dataset that tokenizes each line, having previously split the documents in the dataset into lines of a \"tokenizable\" size, as the old TextDataset class would do, where you only had to do the following, and a tokenized dataset without text loss would be available to pass to a DataCollator:\r\n\r\n```\r\nmodel_checkpoint = 'distilbert-base-uncased'\r\n\r\nfrom transformers import AutoTokenizer\r\ntokenizer = AutoTokenizer.from_pretrained(model_checkpoint)\r\n\r\nfrom transformers import TextDataset\r\n\r\ndataset = TextDataset(\r\n tokenizer=tokenizer,\r\n file_path=\"path\/to\/text_file.txt\",\r\n block_size=512,\r\n)\r\n```\r\n\r\nFor now, what I have is the following, which, of course, throws an error because each line is longer than the maximum block size in the tokenizer:\r\n\r\n```\r\nimport datasets\r\ndataset = datasets.load_dataset('path\/to\/text_file.txt')\r\n\r\nmodel_checkpoint = 'distilbert-base-uncased'\r\ntokenizer = AutoTokenizer.from_pretrained(model_checkpoint)\r\n\r\ndef tokenize_function(examples):\r\n return tokenizer(examples[\"text\"])\r\n\r\ntokenized_datasets = dataset.map(tokenize_function, batched=True, num_proc=4, remove_columns=[\"text\"])\r\n\r\ntokenized_datasets\r\n```\r\n\r\nSo what would be the \"standard\" way of creating a dataset in the way it was done before?\r\n\r\nThank you very much for the help :))","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2285\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2285\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2284","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2284\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2284\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2284\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2284","id":870932710,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MDM5MDc5","number":2284,"title":"Initialize Imdb dataset as used in Don't Stop Pretraining Paper","user":{"login":"BobbyManion","id":52530809,"node_id":"MDQ6VXNlcjUyNTMwODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52530809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BobbyManion","html_url":"https:\/\/github.com\/BobbyManion","followers_url":"https:\/\/api.github.com\/users\/BobbyManion\/followers","following_url":"https:\/\/api.github.com\/users\/BobbyManion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BobbyManion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BobbyManion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BobbyManion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BobbyManion\/orgs","repos_url":"https:\/\/api.github.com\/users\/BobbyManion\/repos","events_url":"https:\/\/api.github.com\/users\/BobbyManion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BobbyManion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T11:52:38Z","updated_at":"2021-04-29T12:54:34Z","closed_at":"2021-04-29T12:54:34Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2284","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2284","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2284.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2284.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2284\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2284\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2283","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2283\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2283\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2283\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2283","id":870926475,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MDM0MDk5","number":2283,"title":"Initialize imdb dataset from don't stop pretraining 
paper","user":{"login":"BobbyManion","id":52530809,"node_id":"MDQ6VXNlcjUyNTMwODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52530809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BobbyManion","html_url":"https:\/\/github.com\/BobbyManion","followers_url":"https:\/\/api.github.com\/users\/BobbyManion\/followers","following_url":"https:\/\/api.github.com\/users\/BobbyManion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BobbyManion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BobbyManion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BobbyManion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BobbyManion\/orgs","repos_url":"https:\/\/api.github.com\/users\/BobbyManion\/repos","events_url":"https:\/\/api.github.com\/users\/BobbyManion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BobbyManion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T11:44:54Z","updated_at":"2021-04-29T11:50:24Z","closed_at":"2021-04-29T11:50:24Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2283","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2283","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2283.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2283.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2283\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2283\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2282","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2282\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2282\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2282\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2282","id":870900332,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MDEyMzM3","number":2282,"title":"Initialize imdb dataset from don't stop pretraining 
paper","user":{"login":"BobbyManion","id":52530809,"node_id":"MDQ6VXNlcjUyNTMwODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52530809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BobbyManion","html_url":"https:\/\/github.com\/BobbyManion","followers_url":"https:\/\/api.github.com\/users\/BobbyManion\/followers","following_url":"https:\/\/api.github.com\/users\/BobbyManion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BobbyManion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BobbyManion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BobbyManion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BobbyManion\/orgs","repos_url":"https:\/\/api.github.com\/users\/BobbyManion\/repos","events_url":"https:\/\/api.github.com\/users\/BobbyManion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BobbyManion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T11:17:56Z","updated_at":"2021-04-29T11:43:51Z","closed_at":"2021-04-29T11:43:51Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2282","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2282","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2282.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2282.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2282\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2282\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2281","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2281\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2281\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2281\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2281","id":870792784,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI1OTI2MjAw","number":2281,"title":"Update multi_woz_v22 
checksum","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T09:09:11Z","updated_at":"2021-04-29T13:41:35Z","closed_at":"2021-04-29T13:41:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2281","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2281","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2281.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2281.patch","merged_at":"2021-04-29T13:41:34Z"},"body":"Fix issue https:\/\/github.com\/huggingface\/datasets\/issues\/1876\r\nThe files were changed in https:\/\/github.com\/budzianowski\/multiwoz\/pull\/72","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2281\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2281\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2280","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2280\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2280\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2280\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2280","id":870780431,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI1OTE2Mzcy","number":2280,"title":"Fixed typo 
seperate->separate","user":{"login":"laksh9950","id":32505743,"node_id":"MDQ6VXNlcjMyNTA1NzQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32505743?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/laksh9950","html_url":"https:\/\/github.com\/laksh9950","followers_url":"https:\/\/api.github.com\/users\/laksh9950\/followers","following_url":"https:\/\/api.github.com\/users\/laksh9950\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/laksh9950\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/laksh9950\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/laksh9950\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/laksh9950\/orgs","repos_url":"https:\/\/api.github.com\/users\/laksh9950\/repos","events_url":"https:\/\/api.github.com\/users\/laksh9950\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/laksh9950\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-29T08:55:46Z","updated_at":"2021-04-29T16:41:22Z","closed_at":"2021-04-29T16:41:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2280","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2280","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2280.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2280.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2280\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2280\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2279","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2279\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2279\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2279\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2279","id":870431662,"node_id":"MDU6SXNzdWU4NzA0MzE2NjI=","number":2279,"title":"Compatibility with Ubuntu 18 and GLIBC 
2.27?","user":{"login":"tginart","id":11379648,"node_id":"MDQ6VXNlcjExMzc5NjQ4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11379648?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tginart","html_url":"https:\/\/github.com\/tginart","followers_url":"https:\/\/api.github.com\/users\/tginart\/followers","following_url":"https:\/\/api.github.com\/users\/tginart\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tginart\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tginart\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tginart\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tginart\/orgs","repos_url":"https:\/\/api.github.com\/users\/tginart\/repos","events_url":"https:\/\/api.github.com\/users\/tginart\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tginart\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-28T22:08:07Z","updated_at":"2021-04-29T07:42:42Z","closed_at":"2021-04-29T07:42:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nFor use on Ubuntu systems, it seems that datasets requires GLIBC 2.29. However, Ubuntu 18 runs with GLIBC 2.27 and it seems [non-trivial to upgrade GLIBC to 2.29 for Ubuntu 18 users](https:\/\/www.digitalocean.com\/community\/questions\/how-install-glibc-2-29-or-higher-in-ubuntu-18-04). \r\n\r\nI'm not sure if there is anything that can be done about this, but I'd like to confirm that using huggingface\/datasets requires either an upgrade to Ubuntu 19\/20 or a hand-rolled install of a higher version of GLIBC.\r\n\r\n## Steps to reproduce the bug\r\n1. clone the transformers repo\r\n2. move to examples\/pytorch\/language-modeling\r\n3. run example command:\r\n```python run_clm.py --model_name_or_path gpt2 --dataset_name wikitext --dataset_config_name wikitext-2-raw-v1 --do_train --do_eval --output_dir \/tmp\/test-clm```\r\n\r\n\r\n## Expected results\r\nAs described in the transformers repo.\r\n\r\n## Actual results\r\n```Traceback (most recent call last):\r\n File \"run_clm.py\", line 34, in \r\n from transformers import (\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/__init__.py\", line 2487, in __getattr__\r\n return super().__getattr__(name)\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/file_utils.py\", line 1699, in __getattr__\r\n module = self._get_module(self._class_to_module[name])\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/__init__.py\", line 2481, in _get_module\r\n return importlib.import_module(\".\" + module_name, self.__name__)\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/importlib\/__init__.py\", line 127, in import_module\r\n return _bootstrap._gcd_import(name[level:], package, level)\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/models\/__init__.py\", line 19, in \r\n from . 
import (\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/models\/layoutlm\/__init__.py\", line 23, in \r\n from .tokenization_layoutlm import LayoutLMTokenizer\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/models\/layoutlm\/tokenization_layoutlm.py\", line 19, in \r\n from ..bert.tokenization_bert import BertTokenizer\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/models\/bert\/tokenization_bert.py\", line 23, in \r\n from ...tokenization_utils import PreTrainedTokenizer, _is_control, _is_punctuation, _is_whitespace\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils.py\", line 26, in \r\n from .tokenization_utils_base import (\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 68, in \r\n from tokenizers import AddedToken\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/tokenizers\/__init__.py\", line 79, in \r\n from .tokenizers import (\r\nImportError: \/lib\/x86_64-linux-gnu\/libm.so.6: version `GLIBC_2.29' not found (required by \/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/tokenizers\/tokenizers.cpython-37m-x86_64-linux-gnu.so)\r\n```\r\n\r\n## Versions\r\nPaste the output of the following code:\r\n```\r\n- Datasets: 1.6.1\r\n- Python: 3.7.10 (default, Feb 26 2021, 18:47:35) \r\n[GCC 7.3.0]\r\n- Platform: Linux-4.15.0-128-generic-x86_64-with-debian-buster-sid\r\n\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2279\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2279\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2278","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2278\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2278\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2278\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2278","id":870088059,"node_id":"MDU6SXNzdWU4NzAwODgwNTk=","number":2278,"title":"Loss result 
inGptNeoForCasual","user":{"login":"Yossillamm","id":51174606,"node_id":"MDQ6VXNlcjUxMTc0NjA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/51174606?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Yossillamm","html_url":"https:\/\/github.com\/Yossillamm","followers_url":"https:\/\/api.github.com\/users\/Yossillamm\/followers","following_url":"https:\/\/api.github.com\/users\/Yossillamm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Yossillamm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Yossillamm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Yossillamm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Yossillamm\/orgs","repos_url":"https:\/\/api.github.com\/users\/Yossillamm\/repos","events_url":"https:\/\/api.github.com\/users\/Yossillamm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Yossillamm\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-28T15:39:52Z","updated_at":"2021-05-06T16:14:23Z","closed_at":"2021-05-06T16:14:23Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Is there any way you give the \" loss\" and \"logits\" results in the gpt neo api? ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2278\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2278\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2277","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2277\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2277\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2277\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2277","id":870071994,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI1MzI5NjIz","number":2277,"title":"Create 
CacheManager","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/8","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8\/labels","id":6968069,"node_id":"MI_kwDODunzps4AalMF","number":8,"title":"1.12","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":4,"closed_issues":2,"state":"open","created_at":"2021-07-21T15:34:56Z","updated_at":"2021-10-13T10:26:33Z","due_on":"2021-08-30T07:00:00Z","closed_at":null},"comments":0,"created_at":"2021-04-28T15:23:42Z","updated_at":"2021-09-02T05:33:31Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2277","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2277","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2277.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2277.patch","merged_at":null},"body":"Perform refactoring to decouple 
cache functionality (method `as_dataset`).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2277\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2277\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2276","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2276\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2276\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2276\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2276","id":870010511,"node_id":"MDU6SXNzdWU4NzAwMTA1MTE=","number":2276,"title":"concatenate_datasets loads all the data into memory","user":{"login":"TaskManager91","id":7063207,"node_id":"MDQ6VXNlcjcwNjMyMDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7063207?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TaskManager91","html_url":"https:\/\/github.com\/TaskManager91","followers_url":"https:\/\/api.github.com\/users\/TaskManager91\/followers","following_url":"https:\/\/api.github.com\/users\/TaskManager91\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TaskManager91\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TaskManager91\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TaskManager91\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TaskManager91\/orgs","repos_url":"https:\/\/api.github.com\/users\/TaskManager91\/repos","events_url":"https:\/\/api.github.com\/users\/TaskManager91\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TaskManager91\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2021-04-28T14:27:21Z","updated_at":"2021-05-03T08:41:55Z","closed_at":"2021-05-03T08:41:55Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nWhen I try to concatenate 2 datasets (10GB each) , the entire data is loaded into memory instead of being written directly to disk.\r\n\r\nInterestingly, this happens when trying to save the new dataset to disk or concatenating it again.\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/7063207\/116420321-2b21b480-a83e-11eb-9006-8f6ca729fb6f.png)\r\n\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import concatenate_datasets, load_from_disk\r\n\r\ntest_sampled_pro = load_from_disk(\"test_sampled_pro\")\r\nval_sampled_pro = load_from_disk(\"val_sampled_pro\")\r\n\r\nbig_set = concatenate_datasets([test_sampled_pro, val_sampled_pro])\r\n\r\n# Loaded to memory\r\nbig_set.save_to_disk(\"big_set\")\r\n\r\n# Loaded to memory\r\nbig_set = concatenate_datasets([big_set, val_sampled_pro])\r\n```\r\n\r\n## Expected results\r\nThe data should be loaded into memory in batches and then saved directly to disk.\r\n\r\n## Actual results\r\nThe entire data set is loaded into the memory and then saved to the hard disk.\r\n\r\n## Versions\r\nPaste the output of the following code:\r\n```python\r\n- Datasets: 1.6.1\r\n- Python: 3.8.8 (default, Apr 13 2021, 19:58:26) \r\n[GCC 7.3.0]\r\n- Platform: 
Linux-5.4.72-microsoft-standard-WSL2-x86_64-with-glibc2.10\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2276\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2276\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2275","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2275\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2275\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2275\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2275","id":869378311,"node_id":"MDU6SXNzdWU4NjkzNzgzMTE=","number":2275,"title":"SNLI dataset has labels of -1 ","user":{"login":"puzzler10","id":17426779,"node_id":"MDQ6VXNlcjE3NDI2Nzc5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17426779?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/puzzler10","html_url":"https:\/\/github.com\/puzzler10","followers_url":"https:\/\/api.github.com\/users\/puzzler10\/followers","following_url":"https:\/\/api.github.com\/users\/puzzler10\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/puzzler10\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/puzzler10\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/puzzler10\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/puzzler10\/orgs","repos_url":"https:\/\/api.github.com\/users\/puzzler10\/repos","events_url":"https:\/\/api.github.com\/users\/puzzler10\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/puzzler10\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-28T00:32:25Z","updated_at":"2021-05-17T13:34:18Z","closed_at":"2021-05-17T13:34:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"There are a number of rows with a label of -1 in the SNLI dataset. The dataset descriptions [here](https:\/\/nlp.stanford.edu\/projects\/snli\/) and [here](https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/snli) don't list -1 as a label possibility, and neither does the dataset viewer. As examples, see index 107 or 124 of the test set.\r\n\r\nIt isn't clear what these labels mean. I found a [line of code](https:\/\/github.com\/huggingface\/datasets\/blob\/80e59ef178d3bb2090d091bc32315c655eb0633d\/datasets\/snli\/snli.py#L94) that seems to put them in but it seems still unclear why they are there. The current workaround is to just drop the rows from any model being trained. 
\r\n\r\nPerhaps the documentation should be updated.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2275\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2275\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2274","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2274\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2274\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2274\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2274","id":869186276,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI0NTkyMjQx","number":2274,"title":"Always update metadata in arrow schema","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-27T19:21:57Z","updated_at":"2021-04-29T09:57:51Z","closed_at":"2021-04-29T09:57:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2274","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2274","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2274.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2274.patch","merged_at":"2021-04-29T09:57:50Z"},"body":"We store a redundant copy of the features in the metadata of the schema of the arrow table. This is used to recover the features when doing `Dataset.from_file`. 
These metadata are updated after each transform that changes the feature types.\r\n\r\nFor each function that transforms the feature types of the dataset, I added a step in the tests to make sure the metadata in the arrow schema are up to date.\r\n\r\nI also added a line to update the metadata directly in the Dataset.__init__ method.\r\nThis way even a dataset instantiated with __init__ will have a table with the right metadata.\r\n\r\ncc @mariosasko ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2274\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2274\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2273","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2273\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2273\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2273\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2273","id":869046290,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI0NDcxODc1","number":2273,"title":"Added CUAD metrics","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-27T16:49:12Z","updated_at":"2021-04-29T13:59:47Z","closed_at":"2021-04-29T13:59:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2273","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2273","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2273.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2273.patch","merged_at":"2021-04-29T13:59:47Z"},"body":"`EM`, `F1`, `AUPR`, `Precision@80%Recall`, and `Precision@90%Recall` metrics supported for CUAD","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2273\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2273\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2272","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2272\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2272\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2272\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2272","id":869017977,"node_id":"MDU6SXNzdWU4NjkwMTc5Nzc=","number":2272,"title":"Bug in Dataset.class_encode_column","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-27T16:13:18Z","updated_at":"2021-04-30T12:54:27Z","closed_at":"2021-04-30T12:54:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nAll the rest of the columns except the one passed to `Dataset.class_encode_column` are discarded.\r\n\r\n## Expected results\r\n\r\nAll the original columns should be kept.\r\n\r\nThis needs regression tests.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2272\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2272\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2271","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2271\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2271\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2271\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2271","id":869002141,"node_id":"MDU6SXNzdWU4NjkwMDIxNDE=","number":2271,"title":"Synchronize table metadata with 
features","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-27T15:55:13Z","updated_at":"2021-04-28T12:48:25Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nAs pointed out in this [comment](https:\/\/github.com\/huggingface\/datasets\/pull\/2145#discussion_r621326767):\r\n> Metadata stored in the schema is just a redundant information regarding the feature types.\r\nIt is used when calling Dataset.from_file to know which feature types to use.\r\nThese metadata are stored in the schema of the pyarrow table by using `update_metadata_with_features`.\r\nHowever this something that's almost never tested properly.\r\n\r\n**Describe the solution you'd like**\r\n\r\nWe should find a way to always make sure that the metadata (in `self.data.schema.metadata`) are synced with the actual feature types (in `self.info.features`).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2271\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2271\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2270","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2270\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2270\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2270\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2270","id":868913660,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI0MzU5Njky","number":2270,"title":"Fix iterable interface expected by 
numpy","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-27T14:35:56Z","updated_at":"2021-04-28T17:39:27Z","closed_at":"2021-04-28T17:39:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2270","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2270","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2270.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2270.patch","merged_at":null},"body":"Numpy expects the old iterable interface with `__getitem__` instead of `__iter__`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2270\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2270\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2269","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2269\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2269\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2269\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2269","id":868878468,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI0MzMwNDA3","number":2269,"title":"Fix query table with 
iterable","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-27T13:59:38Z","updated_at":"2021-04-27T14:21:57Z","closed_at":"2021-04-27T14:21:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2269","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2269","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2269.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2269.patch","merged_at":"2021-04-27T14:21:56Z"},"body":"The benchmark runs are failing on master because it tries to use an iterable to query the dataset.\r\nHowever there's currently an issue caused by the use of `np.array` instead of `np.fromiter` on the iterable.\r\nThis PR fixes it","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2269\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2269\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2268","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2268\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2268\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2268\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2268","id":868773380,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI0MjQyODg1","number":2268,"title":"Don't use pyarrow 4.0.0 since it segfaults when casting a sliced ListArray of 
integers","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-27T11:58:28Z","updated_at":"2021-06-12T12:44:49Z","closed_at":"2021-04-27T13:43:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2268","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2268","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2268.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2268.patch","merged_at":"2021-04-27T13:43:20Z"},"body":"This test `tests\/test_table.py::test_concatenation_table_cast` segfaults with the latest update of pyarrow 4.0.0.\r\nSetting `pyarrow<4.0.0` for now. I'll open an issue on JIRA once I know more about the origin of the issue","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2268\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2268\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2267","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2267\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2267\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2267\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2267","id":868291129,"node_id":"MDU6SXNzdWU4NjgyOTExMjk=","number":2267,"title":"DatasetDict save load Failing test in 1.6 not in 
1.5","user":{"login":"timothyjlaurent","id":2000204,"node_id":"MDQ6VXNlcjIwMDAyMDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2000204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timothyjlaurent","html_url":"https:\/\/github.com\/timothyjlaurent","followers_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/followers","following_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/orgs","repos_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/repos","events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-04-27T00:03:25Z","updated_at":"2021-05-28T15:27:34Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nWe have a test that saves a DatasetDict to disk and then loads it from disk. In 1.6 there is an incompatibility in the schema.\r\n\r\n\r\n\r\n\r\nDowngrading to `>1.6` -- fixes the problem.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n\r\n### Load a dataset dict from jsonl \r\n\r\npath = '\/test\/foo'\r\n\r\nds_dict.save_to_disk(path)\r\n\r\nds_from_disk = DatasetDict.load_from_disk(path). ## <-- this is where I see the error on 1.6\r\n```\r\n\r\n## Expected results\r\n\r\nUpgrading to 1.6 shouldn't break that test. 
We should be able to serialize to and from disk.\r\n\r\n## Actual results\r\n```\r\n # Infer features if None\r\n inferred_features = Features.from_arrow_schema(arrow_table.schema)\r\n if self.info.features is None:\r\n self.info.features = inferred_features\r\n \r\n # Infer fingerprint if None\r\n \r\n if self._fingerprint is None:\r\n self._fingerprint = generate_fingerprint(self)\r\n \r\n # Sanity checks\r\n \r\n assert self.features is not None, \"Features can't be None in a Dataset object\"\r\n assert self._fingerprint is not None, \"Fingerprint can't be None in a Dataset object\"\r\n if self.info.features.type != inferred_features.type:\r\n> raise ValueError(\r\n \"External features info don't match the dataset:\\nGot\\n{}\\nwith type\\n{}\\n\\nbut expected something like\\n{}\\nwith type\\n{}\".format(\r\n self.info.features, self.info.features.type, inferred_features, inferred_features.type\r\n )\r\n )\r\nE ValueError: External features info don't match the dataset:\r\nE Got\r\nE {'_input_hash': Value(dtype='int64', id=None), '_task_hash': Value(dtype='int64', id=None), '_view_id': Value(dtype='string', id=None), 'answer': Value(dtype='string', id=None), 'encoding__ids': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'encoding__offsets': Sequence(feature=Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), length=-1, id=None), 'encoding__overflowing': Sequence(feature=Value(dtype='null', id=None), length=-1, id=None), 'encoding__tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'encoding__words': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'ner_ids': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'ner_labels': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'relations': [{'child': Value(dtype='int64', id=None), 'child_span': {'end': Value(dtype='int64', id=None), 'label': Value(dtype='string', id=None), 'start': Value(dtype='int64', id=None), 'token_end': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None)}, 'color': Value(dtype='string', id=None), 'head': Value(dtype='int64', id=None), 'head_span': {'end': Value(dtype='int64', id=None), 'label': Value(dtype='string', id=None), 'start': Value(dtype='int64', id=None), 'token_end': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None)}, 'label': Value(dtype='string', id=None)}], 'spans': [{'end': Value(dtype='int64', id=None), 'label': Value(dtype='string', id=None), 'start': Value(dtype='int64', id=None), 'text': Value(dtype='string', id=None), 'token_end': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None), 'type': Value(dtype='string', id=None)}], 'text': Value(dtype='string', id=None), 'tokens': [{'disabled': Value(dtype='bool', id=None), 'end': Value(dtype='int64', id=None), 'id': Value(dtype='int64', id=None), 'start': Value(dtype='int64', id=None), 'text': Value(dtype='string', id=None), 'ws': Value(dtype='bool', id=None)}]}\r\nE with type\r\nE struct<_input_hash: int64, _task_hash: int64, _view_id: string, answer: string, encoding__ids: list, encoding__offsets: list>, encoding__overflowing: list, encoding__tokens: list, encoding__words: list, ner_ids: list, ner_labels: list, relations: list, color: string, head: int64, head_span: struct, label: string>>, spans: list>, text: string, tokens: list>>\r\nE \r\nE but expected something like\r\nE {'_input_hash': Value(dtype='int64', id=None), '_task_hash': Value(dtype='int64', 
id=None), '_view_id': Value(dtype='string', id=None), 'answer': Value(dtype='string', id=None), 'encoding__ids': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'encoding__offsets': Sequence(feature=Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), length=-1, id=None), 'encoding__overflowing': Sequence(feature=Value(dtype='null', id=None), length=-1, id=None), 'encoding__tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'encoding__words': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'ner_ids': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'ner_labels': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'relations': [{'head': Value(dtype='int64', id=None), 'child': Value(dtype='int64', id=None), 'head_span': {'start': Value(dtype='int64', id=None), 'end': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None), 'token_end': Value(dtype='int64', id=None), 'label': Value(dtype='string', id=None)}, 'child_span': {'start': Value(dtype='int64', id=None), 'end': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None), 'token_end': Value(dtype='int64', id=None), 'label': Value(dtype='string', id=None)}, 'color': Value(dtype='string', id=None), 'label': Value(dtype='string', id=None)}], 'spans': [{'text': Value(dtype='string', id=None), 'start': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None), 'token_end': Value(dtype='int64', id=None), 'end': Value(dtype='int64', id=None), 'type': Value(dtype='string', id=None), 'label': Value(dtype='string', id=None)}], 'text': Value(dtype='string', id=None), 'tokens': [{'text': Value(dtype='string', id=None), 'start': Value(dtype='int64', id=None), 'end': Value(dtype='int64', id=None), 'id': Value(dtype='int64', id=None), 'ws': Value(dtype='bool', id=None), 'disabled': Value(dtype='bool', id=None)}]}\r\nE with type\r\nE struct<_input_hash: int64, _task_hash: int64, _view_id: string, answer: string, encoding__ids: list, encoding__offsets: list>, encoding__overflowing: list, encoding__tokens: list, encoding__words: list, ner_ids: list, ner_labels: list, relations: list , child_span: struct, color: string, label: string>>, spans: list>, text: string, tokens: list>>\r\n\r\n..\/..\/..\/..\/..\/.virtualenvs\/tf_ner_rel_lib\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py:274: ValueError\r\n```\r\n## Versions\r\n- Datasets: 1.6.1\r\n- Python: 3.8.5 (default, Jan 26 2021, 10:01:04) \r\n[Clang 12.0.0 (clang-1200.0.32.2)]\r\n- Platform: macOS-10.15.7-x86_64-i386-64bit\r\n\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2267\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2267\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2266","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2266\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2266\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2266\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2266","id":867864353,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIzNDY1OTI5","number":2266,"title":"Make tests run faster","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-26T15:55:40Z","updated_at":"2021-04-29T10:00:13Z","closed_at":"2021-04-29T10:00:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2266","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2266","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2266.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2266.patch","merged_at":"2021-04-29T10:00:04Z"},"body":"From 7min to 2min to run pytest.\r\nIdeally we should keep the whole CI run time below 10min.\r\n\r\nIn this PR I removed the remote tests that were never used.\r\nI also replaced nested parametrized tests with unit tests.\r\nThis makes me think that we could still add more high level tests to check for a few combinations of parameters (but not all of them since there are too many of them).\r\nLet me know what you think\r\n\r\nFinally in another PR we can also separate in two circleci jobs:\r\n- the tests of the code code of the lib\r\n- the tests of the all the dataset\/metric scripts.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2266\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2266\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2265","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2265\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2265\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2265\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2265","id":867490646,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIzMTUyOTg5","number":2265,"title":"Update black","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-26T09:35:09Z","updated_at":"2021-04-26T09:47:48Z","closed_at":"2021-04-26T09:47:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2265","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2265","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2265.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2265.patch","merged_at":"2021-04-26T09:47:47Z"},"body":"Latest black version 21.4b0 requires to reformat most dataset scripts and also the core code of the lib.\r\nThis makes the CI currently fail on master","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2265\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2265\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2264","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2264\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2264\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2264\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2264","id":867476228,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIzMTQwODA1","number":2264,"title":"Fix memory issue in multiprocessing: Don't pickle table 
index","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-04-26T09:21:35Z","updated_at":"2021-04-26T10:30:28Z","closed_at":"2021-04-26T10:08:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2264","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2264","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2264.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2264.patch","merged_at":"2021-04-26T10:08:14Z"},"body":"The table index is currently being pickled when doing multiprocessing, which brings all the record batches of the dataset in memory.\r\n\r\nI fixed that by not pickling the index attributes. 
Therefore each process has to rebuild the index when unpickling the table.\r\n\r\nFix issue #2256\r\n\r\nWe'll do a patch release asap !","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2264\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2264\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2263","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2263\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2263\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2263\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2263","id":867420912,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIzMDk0NTcy","number":2263,"title":"test data added, dataset_infos updated","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-26T08:27:18Z","updated_at":"2021-04-29T09:30:21Z","closed_at":"2021-04-29T09:30:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2263","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2263","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2263.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2263.patch","merged_at":"2021-04-29T09:30:20Z"},"body":"Fixes #2262. 
Thanks for pointing out issue with dataset @jinmang2!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2263\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2263\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2262","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2262\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2262\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2262\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2262","id":867325351,"node_id":"MDU6SXNzdWU4NjczMjUzNTE=","number":2262,"title":"NewsPH NLI dataset script fails to access test data.","user":{"login":"jinmang2","id":37775784,"node_id":"MDQ6VXNlcjM3Nzc1Nzg0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37775784?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jinmang2","html_url":"https:\/\/github.com\/jinmang2","followers_url":"https:\/\/api.github.com\/users\/jinmang2\/followers","following_url":"https:\/\/api.github.com\/users\/jinmang2\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jinmang2\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jinmang2\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jinmang2\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jinmang2\/orgs","repos_url":"https:\/\/api.github.com\/users\/jinmang2\/repos","events_url":"https:\/\/api.github.com\/users\/jinmang2\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jinmang2\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-26T06:44:41Z","updated_at":"2021-04-29T09:32:03Z","closed_at":"2021-04-29T09:30:20Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In Newsph-NLI Dataset (#1192), it fails to access test data.\r\n\r\nAccording to the script below, the download manager will download the train data when trying to download the test data. 
\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/2a2dd6316af2cc7fdf24e4779312e8ee0c7ed98b\/datasets\/newsph_nli\/newsph_nli.py#L71\r\n\r\nIf you download it according to the script above, you can see that train and test receive the same data as shown below.\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> newsph_nli = load_dataset(path=\".\/datasets\/newsph_nli.py\")\r\n>>> newsph_nli\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 420000\r\n })\r\n test: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 420000\r\n })\r\n validation: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 90000\r\n })\r\n})\r\n>>> newsph_nli[\"train\"][0]\r\n{'hypothesis': 'Ito ang dineklara ni Atty. Romulo Macalintal, abogado ni Robredo, kaugnay ng pagsisimula ng preliminary conference ngayong hapon sa Presidential Electoral Tribunal (PET).',\r\n 'label': 1,\r\n 'premise': '\"Hindi ko ugali ang mamulitika; mas gusto kong tahimik na magtrabaho. Pero sasabihin ko ito ngayon: ang tapang, lakas, at diskarte, hindi nadadaan sa mapanirang salita. Ang kailangan ng taumbayan ay tapang sa gawa,\" ayon kay Robredo sa inilabas nitong statement.'}\r\n>>> newsph_nli[\"test\"][0]\r\n{'hypothesis': 'Ito ang dineklara ni Atty. Romulo Macalintal, abogado ni Robredo, kaugnay ng pagsisimula ng preliminary conference ngayong hapon sa Presidential Electoral Tribunal (PET).',\r\n 'label': 1,\r\n 'premise': '\"Hindi ko ugali ang mamulitika; mas gusto kong tahimik na magtrabaho. Pero sasabihin ko ito ngayon: ang tapang, lakas, at diskarte, hindi nadadaan sa mapanirang salita. Ang kailangan ng taumbayan ay tapang sa gawa,\" ayon kay Robredo sa inilabas nitong statement.'}\r\n```\r\n\r\nIn local, I modified the code of the source as below and got the correct result.\r\n```python\r\n71 test_path = os.path.join(download_path, \"test.csv\") \r\n```\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> newsph_nli = load_dataset(path=\".\/datasets\/newsph_nli.py\")\r\n>>> newsph_nli\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 420000\r\n })\r\n test: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 9000\r\n })\r\n validation: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 90000\r\n })\r\n})\r\n>>> newsph_nli[\"train\"][0]\r\n{'hypothesis': 'Ito ang dineklara ni Atty. Romulo Macalintal, abogado ni Robredo, kaugnay ng pagsisimula ng preliminary conference ngayong hapon sa Presidential Electoral Tribunal (PET).',\r\n 'label': 1,\r\n 'premise': '\"Hindi ko ugali ang mamulitika; mas gusto kong tahimik na magtrabaho. Pero sasabihin ko ito ngayon: ang tapang, lakas, at diskarte, hindi nadadaan sa mapanirang salita. 
Ang kailangan ng taumbayan ay tapang sa gawa,\" ayon kay Robredo sa inilabas nitong statement.'}\r\n>>> newsph_nli[\"test\"][0]\r\n{'hypothesis': '-- JAI (@JaiPaller) September 13, 2019',\r\n 'label': 1,\r\n 'premise': 'Pinag-iingat ng Konsulado ng Pilipinas sa Dubai ang publiko, partikular ang mga donor, laban sa mga scam na gumagamit ng mga charitable organization.'}\r\n```\r\n\r\nI don't have experience with open source pull requests, so I suggest that you reflect them in the source.\r\n\r\nThank you for reading :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2262\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2262\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2261","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2261\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2261\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2261\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2261","id":867088818,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyODIxNzQw","number":2261,"title":"Improve ReadInstruction logic and update docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-25T19:07:26Z","updated_at":"2021-05-17T18:24:44Z","closed_at":"2021-05-17T16:48:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2261","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2261","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2261.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2261.patch","merged_at":"2021-05-17T16:48:57Z"},"body":"Improve ReadInstruction logic and docs.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2261\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2261\/timeline","performed_via_github_app":null} 
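PR #2261 above reworks the `ReadInstruction` logic and its docs. A hedged usage sketch of that API (the exact post-PR behaviour may differ, and "squad" is only a stand-in dataset name):

```python
from datasets import ReadInstruction, load_dataset

# String form: the first 10% of the train split.
ds_str = load_dataset("squad", split="train[:10%]")

# Equivalent object form, which is what the ReadInstruction changes concern.
ds_obj = load_dataset("squad", split=ReadInstruction("train", to=10, unit="%"))

print(ds_str.num_rows, ds_obj.num_rows)  # expected to match
```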
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2260","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2260\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2260\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2260\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2260","id":866961697,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyNzMwODYx","number":2260,"title":"GooAQ dataset added","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-25T09:26:48Z","updated_at":"2021-05-07T08:36:17Z","closed_at":"2021-05-07T08:36:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2260","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2260","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2260.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2260.patch","merged_at":"2021-05-07T08:36:17Z"},"body":"@lhoestq here the dataset is stored with Git LFS. 
Should I add option for manual downloading of dataset using `git lfs pull` post repo cloning or can we accommodate this in the current `download_and_extract`?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2260\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2260\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2259","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2259\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2259\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2259\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2259","id":866880092,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyNjc2ODA0","number":2259,"title":"Add support for Split.ALL","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-25T01:45:42Z","updated_at":"2021-06-28T08:21:27Z","closed_at":"2021-06-28T08:21:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2259","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2259","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2259.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2259.patch","merged_at":"2021-06-28T08:21:27Z"},"body":"The title says it all.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2259\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2259\/timeline","performed_via_github_app":null} 
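Regarding the GooAQ question in #2260 above (Git-LFS-hosted data versus the standard `download_and_extract`), a minimal and entirely hypothetical sketch of the usual loader pattern follows; the URL is a placeholder and the field names are guesses, not the real GooAQ schema:

```python
import json

import datasets

_DATA_URL = "https://example.com/gooaq/train.jsonl"  # hypothetical placeholder URL


class GooAQSketch(datasets.GeneratorBasedBuilder):
    def _info(self):
        return datasets.DatasetInfo(
            features=datasets.Features(
                {"question": datasets.Value("string"), "answer": datasets.Value("string")}
            )
        )

    def _split_generators(self, dl_manager):
        # download_and_extract caches the remote file locally and returns its path,
        # so no manual `git lfs pull` step is needed by the user.
        path = dl_manager.download_and_extract(_DATA_URL)
        return [datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepath": path})]

    def _generate_examples(self, filepath):
        with open(filepath, encoding="utf-8") as f:
            for idx, line in enumerate(f):
                row = json.loads(line)
                yield idx, {"question": row["question"], "answer": row["answer"]}
```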
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2258","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2258\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2258\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2258\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2258","id":866870588,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyNjcxNTQy","number":2258,"title":"Fix incorrect update_metadata_with_features calls in ArrowDataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-25T00:48:38Z","updated_at":"2021-04-26T17:16:30Z","closed_at":"2021-04-26T16:54:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2258","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2258","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2258.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2258.patch","merged_at":"2021-04-26T16:54:04Z"},"body":"Fixes bugs in the `unpdate_metadata_with_features` calls (caused by changes in #2151)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2258\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2258\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2257","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2257\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2257\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2257\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2257","id":866755203,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyNTkwMDQw","number":2257,"title":"added metrics for 
CUAD","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-24T14:09:54Z","updated_at":"2021-04-29T09:53:38Z","closed_at":"2021-04-27T16:16:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2257","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2257","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2257.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2257.patch","merged_at":null},"body":"For now I've added F1, AUPR, Precision at 80% recall, and Precision at 90%. Last 3 metrics were reported in the [paper](https:\/\/arxiv.org\/pdf\/2103.06268.pdf). 
Please let me know if we require `exact_match` metric too here","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2257\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2257\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2256","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2256\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2256\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2256\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2256","id":866708609,"node_id":"MDU6SXNzdWU4NjY3MDg2MDk=","number":2256,"title":"Running `datase.map` with `num_proc > 1` uses a lot of memory","user":{"login":"roskoN","id":8143425,"node_id":"MDQ6VXNlcjgxNDM0MjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8143425?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/roskoN","html_url":"https:\/\/github.com\/roskoN","followers_url":"https:\/\/api.github.com\/users\/roskoN\/followers","following_url":"https:\/\/api.github.com\/users\/roskoN\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/roskoN\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/roskoN\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/roskoN\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/roskoN\/orgs","repos_url":"https:\/\/api.github.com\/users\/roskoN\/repos","events_url":"https:\/\/api.github.com\/users\/roskoN\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/roskoN\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-04-24T09:56:20Z","updated_at":"2021-04-26T17:12:15Z","closed_at":"2021-04-26T17:12:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\nRunning `datase.map` with `num_proc > 1` leads to a tremendous memory usage that requires swapping on disk and it becomes very slow.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndstc8_datset = load_dataset(\"roskoN\/dstc8-reddit-corpus\", keep_in_memory=False)\r\n\r\n\r\ndef _prepare_sample(batch):\r\n return {\"input_ids\": list(), \"attention_mask\": list()}\r\n\r\n\r\nfor split_name, dataset_split in list(dstc8_datset.items()):\r\n print(f\"Processing {split_name}\")\r\n encoded_dataset_split = dataset_split.map(\r\n function=_prepare_sample,\r\n batched=True,\r\n num_proc=4,\r\n remove_columns=dataset_split.column_names,\r\n batch_size=10,\r\n writer_batch_size=10,\r\n keep_in_memory=False,\r\n )\r\n print(encoded_dataset_split)\r\n\r\n path = f\".\/data\/encoded_{split_name}\"\r\n\r\n encoded_dataset_split.save_to_disk(path)\r\n```\r\n\r\n## Expected results\r\nMemory usage should stay within reasonable boundaries.\r\n\r\n\r\n## Actual results\r\nThis is htop-output from running the provided script.\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/8143425\/115954836-66954980-a4f3-11eb-8340-0153bdc3a475.png)\r\n\r\n## Versions\r\n```\r\n- Datasets: 1.6.0\r\n- Python: 3.8.8 (default, Apr 13 2021, 19:58:26)\r\n[GCC 7.3.0]\r\n- 
Platform: Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.10\r\n```\r\nRunning on WSL2\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2256\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2256\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2255","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2255\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2255\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2255\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2255","id":866242892,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyMTc0Njg4","number":2255,"title":"Task casting for text classification & question answering","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":15,"created_at":"2021-04-23T16:00:41Z","updated_at":"2021-05-18T13:31:36Z","closed_at":"2021-05-18T13:31:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2255","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2255","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2255.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2255.patch","merged_at":"2021-05-18T13:31:35Z"},"body":"This PR implements task preparation for a given task, in the continuation of #2143 \r\n\r\nTask taxonomy follows \ud83e\udd17 Transformers's pipelines taxonomy: https:\/\/github.com\/huggingface\/transformers\/tree\/master\/src\/transformers\/pipelines\r\n\r\nEdit by @lewtun:\r\n\r\nThis PR implements support for the following tasks:\r\n\r\n* `text-classification`\r\n* `question-answering`\r\n\r\nThe intended usage is as follows:\r\n\r\n```python\r\n# Load a dataset with default column names \/ features\r\nds = load_dataset(\"dataset_name\")\r\n# Cast column names \/ features to schema. 
Casting is defined in the dataset's `DatasetInfo`\r\nds = ds.prepare_for_task(task=\"text-classification\")\r\n# Casting can also be realised during load\r\nds = load_dataset(\"dataset_name\", task=\"text-classification\")\r\n# We can also combine shared tasks across dataset concatenation\r\nds1 = load_dataset(\"dataset_name_1\", task=\"text-classification\")\r\nds2 = load_dataset(\"dataset_name_2\", task=\"text-classification\")\r\n# If the tasks have the same schema, so will `ds_concat`\r\nds_concat = concatenate_datasets([ds1, ds2])\r\n```\r\n\r\nNote that the current implementation assumes that `DatasetInfo.task_templates` has been pre-defined by the user \/ contributor when overriding the `MyDataset(GeneratorBasedBuilder)._info` function.\r\n\r\nAs pointed out by @SBrandeis, for evaluation we'll need a way to detect which datasets are already have a compatible schema so we don't have to edit hundreds of dataset scripts. One possibility is to check if the schema features are a subset of the dataset ones, e.g.\r\n\r\n```python\r\nsquad = load_dataset(\".\/datasets\/squad\", split=\"train\")\r\nqa = QuestionAnswering()\r\nschema = Features({**qa.input_schema, **qa.label_schema})\r\nassert all(item in squad.features.items() for item in schema.items())\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2255\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2255\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2254","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2254\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2254\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2254\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2254","id":866169312,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyMTE1NDI0","number":2254,"title":"Update format, fingerprint and indices after 
add_item","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-23T14:31:49Z","updated_at":"2021-04-27T16:30:49Z","closed_at":"2021-04-27T16:30:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2254","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2254","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2254.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2254.patch","merged_at":"2021-04-27T16:30:48Z"},"body":"Added fingerprint and format update wrappers + update the indices by adding the index of the newly added item in the table.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2254\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2254\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2253","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2253\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2253\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2253\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2253","id":866034321,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyMDA2Njg3","number":2253,"title":"Perform minor refactoring: use 
config","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-23T11:45:47Z","updated_at":"2021-05-27T09:12:45Z","closed_at":"2021-04-27T15:02:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2253","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2253","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2253.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2253.patch","merged_at":"2021-04-27T15:02:58Z"},"body":"Perform minor refactoring related to `config`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2253\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2253\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2252","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2252\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2252\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2252\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2252","id":865870710,"node_id":"MDU6SXNzdWU4NjU4NzA3MTA=","number":2252,"title":"Slow dataloading with big datasets issue 
persists","user":{"login":"hwijeen","id":29157715,"node_id":"MDQ6VXNlcjI5MTU3NzE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29157715?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hwijeen","html_url":"https:\/\/github.com\/hwijeen","followers_url":"https:\/\/api.github.com\/users\/hwijeen\/followers","following_url":"https:\/\/api.github.com\/users\/hwijeen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hwijeen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hwijeen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hwijeen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hwijeen\/orgs","repos_url":"https:\/\/api.github.com\/users\/hwijeen\/repos","events_url":"https:\/\/api.github.com\/users\/hwijeen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hwijeen\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":37,"created_at":"2021-04-23T08:18:20Z","updated_at":"2021-11-24T17:49:55Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI reported too slow data fetching when data is large(#2210) a couple of weeks ago, and @lhoestq referred me to the fix (#2122).\r\nHowever, the problem seems to persist. 
Here is the profiled results:\r\n\r\n\r\n1) Running with 60GB\r\n```\r\nAction \t| Mean duration (s)\t|Num calls \t| Total time (s) \t| Percentage % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nTotal \t| - \t|_ \t| 517.96 \t| 100 % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nmodel_backward \t| 0.26144 \t|100 \t| 26.144 \t| 5.0475 \t|\r\nmodel_forward \t| 0.11123 \t|100 \t| 11.123 \t| 2.1474 \t|\r\nget_train_batch \t| 0.097121 \t|100 \t| 9.7121 \t| 1.8751 \t|\r\n```\r\n\r\n\r\n3) Running with 600GB, datasets==1.6.0\r\n```\r\nAction \t| Mean duration (s)\t|Num calls \t| Total time (s) \t| Percentage % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nTotal \t| - \t|_ \t| 4563.2 \t| 100 % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nget_train_batch \t| 5.1279 \t|100 \t| 512.79 \t| 11.237 \t|\r\nmodel_backward \t| 4.8394 \t|100 \t| 483.94 \t| 10.605 \t|\r\nmodel_forward \t| 0.12162 \t|100 \t| 12.162 \t| 0.26652 \t|\r\n```\r\n\r\nI see that `get_train_batch` lags when data is large. Could this be related to different issues?\r\nI would be happy to provide necessary information to investigate.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2252\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2252\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2251","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2251\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2251\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2251\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2251","id":865848705,"node_id":"MDU6SXNzdWU4NjU4NDg3MDU=","number":2251,"title":"while running run_qa.py, ran into a value 
error","user":{"login":"nlee0212","id":44570724,"node_id":"MDQ6VXNlcjQ0NTcwNzI0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44570724?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nlee0212","html_url":"https:\/\/github.com\/nlee0212","followers_url":"https:\/\/api.github.com\/users\/nlee0212\/followers","following_url":"https:\/\/api.github.com\/users\/nlee0212\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nlee0212\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nlee0212\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nlee0212\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nlee0212\/orgs","repos_url":"https:\/\/api.github.com\/users\/nlee0212\/repos","events_url":"https:\/\/api.github.com\/users\/nlee0212\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nlee0212\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-23T07:51:03Z","updated_at":"2021-04-23T07:51:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"command:\r\n\r\npython3 run_qa.py --model_name_or_path hyunwoongko\/kobart --dataset_name squad_kor_v2 --do_train --do_eval --per_device_train_batch_size 8 --learning_rate 3e-5 --num_train_epochs 3 --max_seq_length 512 --doc_stride 128 --output_dir \/tmp\/debug_squad\/\r\n\r\nerror: \r\n\r\nValueError: External features info don't match the dataset:\r\nGot\r\n{'id': Value(dtype='string', id=None), 'title': Value(dtype='string', id=None), 'context': Value(dtype='string', id=None), 'question': Value(dtype='string', id=None), 'answer': {'text': Value(dtype='string', id=None), 'answer_start': Value(dtype='int32', id=None), 'html_answer_start': Value(dtype='int32', id=None)}, 'url': Value(dtype='string', id=None), 'raw_html': Value(dtype='string', id=None)}\r\nwith type\r\nstruct, context: string, id: string, question: string, raw_html: string, title: string, url: string>\r\n\r\nbut expected something like\r\n{'answer': {'answer_start': Value(dtype='int32', id=None), 'html_answer_start': Value(dtype='int32', id=None), 'text': Value(dtype='string', id=None)}, 'context': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'question': Value(dtype='string', id=None), 'raw_html': Value(dtype='string', id=None), 'title': Value(dtype='string', id=None), 'url': Value(dtype='string', id=None)}\r\nwith type\r\nstruct, context: string, id: string, question: string, raw_html: string, title: string, url: string>\r\n\r\nI didn't encounter this error 4 hours ago. 
any solutions for this kind of issue?\r\nlooks like gained dataset format refers to 'Data Fields', while expected refers to 'Data Instances'.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2251\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2251\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2250","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2250\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2250\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2250\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2250","id":865402449,"node_id":"MDU6SXNzdWU4NjU0MDI0NDk=","number":2250,"title":"some issue in loading local txt file as Dataset for run_mlm.py","user":{"login":"alighofrani95","id":14968123,"node_id":"MDQ6VXNlcjE0OTY4MTIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14968123?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alighofrani95","html_url":"https:\/\/github.com\/alighofrani95","followers_url":"https:\/\/api.github.com\/users\/alighofrani95\/followers","following_url":"https:\/\/api.github.com\/users\/alighofrani95\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alighofrani95\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alighofrani95\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alighofrani95\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alighofrani95\/orgs","repos_url":"https:\/\/api.github.com\/users\/alighofrani95\/repos","events_url":"https:\/\/api.github.com\/users\/alighofrani95\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alighofrani95\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-22T19:39:13Z","updated_at":"2021-08-18T03:49:12Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"![image](https:\/\/user-images.githubusercontent.com\/14968123\/115773877-18cef300-a3c6-11eb-8e58-a9cbfd1001ec.png)\r\n\r\nfirst of all, I tried to load 3 .txt files as a dataset (sure that the directory and permission is OK.), I face with the below error.\r\n\r\n> FileNotFoundError: [Errno 2] No such file or directory: 'c'\r\n\r\nby removing one of the training .txt files It's fixed and although if I put all file as training it's ok\r\n![image](https:\/\/user-images.githubusercontent.com\/14968123\/115774207-867b1f00-a3c6-11eb-953b-905cfb112d25.png)\r\n![image](https:\/\/user-images.githubusercontent.com\/14968123\/115774264-9b57b280-a3c6-11eb-9f36-7b109f0e5a31.png)\r\n\r\n\r\nafter this, my question is how could I use this defined Dataset for run_mlm.py for from scratch pretraining.\r\nby using --train_file path_to_train_file just can use one .txt , .csv or, .json file. 
I tried to set my defined Dataset as --dataset_name but the below issue occurs.\r\n\r\n\r\n> Traceback (most recent call last):\r\n File \"\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py\", line 336, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py\", line 291, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py\", line 621, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/dataset\/dataset.py\r\n\r\n> During handling of the above exception, another exception occurred:\r\n\r\n> Traceback (most recent call last):\r\n File \"run_mlm.py\", line 486, in \r\n main()\r\n File \"run_mlm.py\", line 242, in main\r\n datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name, cache_dir=model_args.cache_dir)\r\n File \"\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py\", line 719, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py\", line 347, in prepare_module\r\n combined_path, github_file_path\r\nFileNotFoundError: Couldn't find file locally at dataset\/dataset.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.6.0\/datasets\/dataset\/dataset.py.\r\nThe file is also not present on the master branch on github.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2250\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2250\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2249","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2249\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2249\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2249\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2249","id":865257826,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIxMzU1MzE3","number":2249,"title":"Allow downloading\/processing\/caching only specific 
splits","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/8","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8\/labels","id":6968069,"node_id":"MI_kwDODunzps4AalMF","number":8,"title":"1.12","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":4,"closed_issues":2,"state":"open","created_at":"2021-07-21T15:34:56Z","updated_at":"2021-10-13T10:26:33Z","due_on":"2021-08-30T07:00:00Z","closed_at":null},"comments":2,"created_at":"2021-04-22T17:51:44Z","updated_at":"2021-09-02T05:33:31Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2249","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2249","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2249.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2249.patch","merged_at":null},"body":"Allow downloading\/processing\/caching only specific splits without 
downloading\/processing\/caching the other splits.\r\n\r\nThis PR implements two steps to handle only specific splits:\r\n- it allows processing\/caching only specific splits into Arrow files\r\n- for some simple cases, it allows downloading only specific splits (which is more intricate as it depends on the user-defined method `_split_generators`)\r\n\r\nThis PR makes several assumptions:\r\n- `DownloadConfig` contains the configuration settings for downloading\r\n- the parameter `split` passed to `load_dataset` is just a parameter for loading (from cache), not for downloading","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2249\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2249\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2248","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2248\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2248\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2248\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2248","id":864853447,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIxMDEyNzg5","number":2248,"title":"Implement Dataset to JSON","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/3","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3\/labels","id":6644287,"node_id":"MDk6TWlsZXN0b25lNjY0NDI4Nw==","number":3,"title":"1.7","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":3,"state":"closed","created_at":"2021-04-09T13:16:31Z","updated_at":"2021-05-31T16:20:53Z","due_on":"2021-05-14T07:00:00Z","closed_at":"2021-05-31T16:20:53Z"},"comments":0,"created_at":"2021-04-22T11:46:51Z","updated_at":"2021-04-27T15:29:21Z","closed_at":"2021-04-27T15:29:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2248","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2248","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2248.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2248.patch","merged_at":"2021-04-27T15:29:20Z"},"body":"Implement `Dataset.to_json`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2248\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2248\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2247","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2247\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2247\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2247\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2247","id":864817520,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIwOTgzNzY3","number":2247,"title":"Implement Dataset from 
Parquet","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/7","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7\/labels","id":6931350,"node_id":"MDk6TWlsZXN0b25lNjkzMTM1MA==","number":7,"title":"1.11","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":2,"state":"closed","created_at":"2021-07-09T05:49:00Z","updated_at":"2021-09-02T05:34:03Z","due_on":"2021-07-30T07:00:00Z","closed_at":"2021-09-02T05:34:03Z"},"comments":2,"created_at":"2021-04-22T11:01:38Z","updated_at":"2021-07-26T13:28:52Z","closed_at":"2021-07-26T13:28:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2247","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2247","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2247.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2247.patch","merged_at":null},"body":"Implement instantiation of 
Dataset from Parquet file.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2247\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2247\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2246","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2246\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2246\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2246\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2246","id":864220031,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIwNDg3OTUw","number":2246,"title":"Faster map w\/ input_columns & faster slicing w\/ Iterable keys","user":{"login":"norabelrose","id":39116809,"node_id":"MDQ6VXNlcjM5MTE2ODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39116809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/norabelrose","html_url":"https:\/\/github.com\/norabelrose","followers_url":"https:\/\/api.github.com\/users\/norabelrose\/followers","following_url":"https:\/\/api.github.com\/users\/norabelrose\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/norabelrose\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/norabelrose\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/norabelrose\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/norabelrose\/orgs","repos_url":"https:\/\/api.github.com\/users\/norabelrose\/repos","events_url":"https:\/\/api.github.com\/users\/norabelrose\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/norabelrose\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-21T19:49:07Z","updated_at":"2021-04-26T16:13:59Z","closed_at":"2021-04-26T16:13:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2246","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2246","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2246.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2246.patch","merged_at":"2021-04-26T16:13:58Z"},"body":"@lhoestq Fixes #2193 \r\n\r\n- `map` now uses `with_format` to only load needed columns in memory when `input_columns` is set\r\n- Slicing datasets with Iterables of indices now uses a new `Table.fast_gather` method, implemented with `np.searchsorted`, to find the appropriate batch indices all at once. 
`pa.concat_tables` is no longer used for this; we just call `pa.Table.from_batches` with a list of all the batch slices.\r\n\r\nTogether these changes have sped up batched `map()` calls over subsets of columns quite considerably in my initial testing.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2246\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2246\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2245","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2245\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2245\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2245\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2245","id":863191655,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE5NjQzMjQ3","number":2245,"title":"Add `key` type and duplicates verification with hashing","user":{"login":"NikhilBartwal","id":42388668,"node_id":"MDQ6VXNlcjQyMzg4NjY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42388668?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NikhilBartwal","html_url":"https:\/\/github.com\/NikhilBartwal","followers_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/followers","following_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/orgs","repos_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/repos","events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":17,"created_at":"2021-04-20T20:03:19Z","updated_at":"2021-05-10T18:04:37Z","closed_at":"2021-05-10T17:31:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2245","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2245","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2245.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2245.patch","merged_at":"2021-05-10T17:31:21Z"},"body":"Closes #2230 \r\nThere is currently no verification for the data type and the uniqueness of the keys yielded by the `dataset_builder`.\r\nThis PR is currently a work in progress with the following goals:\r\n\r\n- [x] Adding `hash_salt` to `ArrowWriter` so that the keys belonging to different splits have different hash\r\n- [x] Add `key` arrtibute to `ArrowWriter.write()` for hashing\r\n- [x] Add a hashing class which takes an input key of certain type (`str`\/`int`\/anything convertible to string) and produces a 128-bit hash using `hashlib.md5`\r\n- [x] Creating a function giving a custom error message when non-unique keys are found \r\n 
**[This will take care of type-checking for keys]**\r\n- [x] Checking for duplicate keys in `writer.write()` for each batch\r\n\r\n[**NOTE**: This PR is currently concerned with `GeneratorBasedBuilder` only, for simplification. A subsequent PR will be made in future for `ArrowBasedBuilder`]\r\n\r\n@lhoestq Thank you for the feedback. It would be great to have your guidance on this!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2245\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2245\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2244","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2244\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2244\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2244\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2244","id":863029946,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE5NTAyODc0","number":2244,"title":"Set specific cache directories per test function call","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/8","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/8\/labels","id":6968069,"node_id":"MI_kwDODunzps4AalMF","number":8,"title":"1.12","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":4,"closed_issues":2,"state":"open","created_at":"2021-07-21T15:34:56Z","updated_at":"2021-10-13T10:26:33Z","due_on":"2021-08-30T07:00:00Z","closed_at":null},"comments":4,"created_at":"2021-04-20T17:06:22Z","updated_at":"2021-09-02T05:33:31Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2244","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2244","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2244.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2244.patch","merged_at":null},"body":"Implement specific cache directories (datasets, metrics and modules) per test function call.\r\n\r\nCurrently, the cache directories are set within the temporary test directory, but they are shared across all test function calls.\r\n\r\nThis PR implements specific cache directories for each test function call, so that tests are atomic and there are no side effects.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2244\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2244\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2243","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2243\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2243\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2243\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2243","id":862909389,"node_id":"MDU6SXNzdWU4NjI5MDkzODk=","number":2243,"title":"Map is slow and processes batches one after 
another","user":{"login":"villmow","id":2743060,"node_id":"MDQ6VXNlcjI3NDMwNjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2743060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/villmow","html_url":"https:\/\/github.com\/villmow","followers_url":"https:\/\/api.github.com\/users\/villmow\/followers","following_url":"https:\/\/api.github.com\/users\/villmow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/villmow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/villmow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/villmow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/villmow\/orgs","repos_url":"https:\/\/api.github.com\/users\/villmow\/repos","events_url":"https:\/\/api.github.com\/users\/villmow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/villmow\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-04-20T14:58:20Z","updated_at":"2021-05-03T17:54:33Z","closed_at":"2021-05-03T17:54:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nI have a somewhat unclear bug to me, where I can't figure out what the problem is. The code works as expected on a small subset of my dataset (2000 samples) on my local machine, but when I execute the same code with a larger dataset (1.4 million samples) this problem occurs. Thats why I can't give exact steps to reproduce, I'm sorry. \r\n\r\nI process a large dataset in a two step process. I first call map on a dataset I load from disk and create a new dataset from it. This works like expected and `map` uses all workers I started it with. Then I process the dataset created by the first step, again with `map`, which is really slow and starting only one or two process at a time. Number of processes is the same for both steps.\r\n\r\npseudo code:\r\n```python\r\nds = datasets.load_from_disk(\"path\")\r\nnew_dataset = ds.map(work, batched=True, ...) # fast uses all processes\r\nfinal_dataset = new_dataset.map(work2, batched=True, ...) # slow starts one process after another\r\n```\r\n\r\n## Expected results\r\nSecond stage should be as fast as the first stage.\r\n\r\n## Versions\r\nPaste the output of the following code:\r\n- Datasets: 1.5.0\r\n- Python: 3.8.8 (default, Feb 24 2021, 21:46:12)\r\n- Platform: Linux-5.4.0-60-generic-x86_64-with-glibc2.10 \r\n\r\nDo you guys have any idea? 
Thanks a lot!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2243\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2243\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2242","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2242\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2242\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2242\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2242","id":862870205,"node_id":"MDU6SXNzdWU4NjI4NzAyMDU=","number":2242,"title":"Link to datasets viwer on Quick Tour page returns \"502 Bad Gateway\"","user":{"login":"martavillegas","id":6735707,"node_id":"MDQ6VXNlcjY3MzU3MDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6735707?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/martavillegas","html_url":"https:\/\/github.com\/martavillegas","followers_url":"https:\/\/api.github.com\/users\/martavillegas\/followers","following_url":"https:\/\/api.github.com\/users\/martavillegas\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/martavillegas\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/martavillegas\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/martavillegas\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/martavillegas\/orgs","repos_url":"https:\/\/api.github.com\/users\/martavillegas\/repos","events_url":"https:\/\/api.github.com\/users\/martavillegas\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/martavillegas\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-20T14:19:51Z","updated_at":"2021-04-20T15:02:45Z","closed_at":"2021-04-20T15:02:45Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Link to datasets viwer (https:\/\/huggingface.co\/datasets\/viewer\/) on Quick Tour page (https:\/\/huggingface.co\/docs\/datasets\/quicktour.html) returns \"502 Bad Gateway\"\r\n\r\nThe same error with https:\/\/huggingface.co\/datasets\/viewer\/?dataset=glue&config=mrpc ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2242\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2242\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2241","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2241\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2241\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2241\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2241","id":862696460,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE5MjI0MzIw","number":2241,"title":"Add SLR32 to OpenSLR","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-20T11:02:45Z","updated_at":"2021-04-23T16:21:24Z","closed_at":"2021-04-23T15:36:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2241","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2241","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2241.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2241.patch","merged_at":"2021-04-23T15:36:15Z"},"body":"I would like to add SLR32 to OpenSLR. 
It contains four South African languages: Afrikaans, Sesotho, Setswana and isiXhosa","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2241\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2241\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2240","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2240\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2240\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2240\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2240","id":862537856,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE5MDkyODc5","number":2240,"title":"Clarify how to load wikihow","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-20T08:02:58Z","updated_at":"2021-04-21T09:54:57Z","closed_at":"2021-04-21T09:54:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2240","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2240","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2240.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2240.patch","merged_at":"2021-04-21T09:54:57Z"},"body":"Explain clearer how to load the dataset in the manual download instructions.\r\n\r\nEn relation with #2239.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2240\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2240\/timeline","performed_via_github_app":null} 
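A minimal, hedged loading sketch for the SLR32 subset added to OpenSLR in PR #2241 above. The config name "SLR32" is assumed to follow the existing OpenSLR config naming; nothing about the feature columns is taken from the PR itself.

```python
# Hypothetical usage of the new OpenSLR SLR32 config (config name assumed, not confirmed by the PR).
from datasets import load_dataset

slr32 = load_dataset("openslr", "SLR32", split="train")
print(slr32)      # inspect the features the config actually exposes
print(slr32[0])   # first example of the South African speech data
```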
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2239","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2239\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2239\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2239\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2239","id":861904306,"node_id":"MDU6SXNzdWU4NjE5MDQzMDY=","number":2239,"title":"Error loading wikihow dataset","user":{"login":"odellus","id":4686956,"node_id":"MDQ6VXNlcjQ2ODY5NTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4686956?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/odellus","html_url":"https:\/\/github.com\/odellus","followers_url":"https:\/\/api.github.com\/users\/odellus\/followers","following_url":"https:\/\/api.github.com\/users\/odellus\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/odellus\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/odellus\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/odellus\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/odellus\/orgs","repos_url":"https:\/\/api.github.com\/users\/odellus\/repos","events_url":"https:\/\/api.github.com\/users\/odellus\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/odellus\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-19T21:02:31Z","updated_at":"2021-04-20T16:33:11Z","closed_at":"2021-04-20T16:33:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nWhen attempting to load wikihow into a dataset with\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('wikihow', data_dir='.\/wikihow')\r\n```\r\nI get the message:\r\n```\r\nAttributeError: 'BuilderConfig' object has no attribute 'filename'\r\n```\r\nat the end of a [full stack trace](https:\/\/gist.github.com\/odellus\/602c3b2de52f541d353b1022f320ffc2).\r\n\r\n## Steps to reproduce the bug\r\n\r\nI have followed the instructions for creating a wikihow dataset. The [wikihow dataset site](https:\/\/huggingface.co\/datasets\/wikihow) says to use \r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('wikihow')\r\n```\r\nto load the dataset. I do so and I get the message\r\n```\r\nAssertionError: The dataset wikihow with config all requires manual data.\r\n Please follow the manual download instructions: You need to manually download two wikihow files. An overview of which files to download can be seen at https:\/\/github.com\/mahnazkoupaee\/WikiHow-Dataset.\r\n You need to download the following two files manually:\r\n 1) https:\/\/ucsb.app.box.com\/s\/ap23l8gafpezf4tq3wapr6u8241zz358 and save the file under \/wikihowAll.csv\r\n 2) https:\/\/ucsb.app.box.com\/s\/7yq601ijl1lzvlfu4rjdbbxforzd2oag and save the file under \/wikihowSep.csv\r\n\r\n The can e.g. 
be \"~\/manual_wikihow_data\".\r\n\r\n Wikihow can then be loaded using the following command `datasets.load_dataset(\"wikihow\", data_dir=\"\")`.\r\n .\r\n Manual data can be loaded with `datasets.load_dataset(wikihow, data_dir='')\r\n```\r\n\r\nSo I create a directory `.\/wikihow` and download `wikihowAll.csv` and `wikihowSep.csv` into the new directory.\r\n\r\nThen I run \r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('wikihow', data_dir='.\/wikihow')\r\n```\r\n\r\nthat's when I get the [stack trace](https:\/\/gist.github.com\/odellus\/602c3b2de52f541d353b1022f320ffc2)\r\n\r\n## Expected results\r\nI expected it to load the downloaded files into a dataset.\r\n\r\n## Actual results\r\n```python\r\nUsing custom data configuration default-data_dir=.%2Fwikihow\r\nDownloading and preparing dataset wikihow\/default (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/azureuser\/.cache\/huggingface\/datasets\/wikihow\/default-data_dir=.%2Fwikihow\/0.0.0\/58f42f8f0e4d459811a0f69aaab35870093830ccd58006769e7e1eb3e0e686c2... ---------------------------------------------------------------------------\r\nAttributeError\r\nTraceback (most recent call last)\r\n in \r\n----> 1 dataset = load_dataset('wikihow',data_dir='.\/wikihow')\r\n~\/.local\/lib\/python3.6\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, **config_kwargs)\r\n745 try_from_hf_gcs=try_from_hf_gcs,\r\n746 base_path=base_path,--> \r\n747 use_auth_token=use_auth_token,\r\n748 )\r\n749 \r\n~\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n577 if not downloaded_from_gcs:\r\n578 self._download_and_prepare( -->\r\n579 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs \r\n580 ) \r\n581 # Sync info\r\n~\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n632 split_dict = SplitDict(dataset_name=self.name)\r\n633 split_generators_kwargs = self._make_split_generators_kwargs(prepare_split_kwargs) -->\r\n634 split_generators = self._split_generators(dl_manager, **split_generators_kwargs) \r\n635 \r\n636 # Checksums verification\r\n~\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wikihow\/58f42f8f0e4d459811a0f69aaab35870093830ccd58006769e7e1eb3e0e686c2\/wikihow.py in _split_generators(self, dl_manager)\r\n132\r\n133 path_to_manual_file = os.path.join(\r\n--> 134 os.path.abspath(os.path.expanduser(dl_manager.manual_dir)), self.config.filename \r\n135 ) \r\n136\r\nAttributeError: 'BuilderConfig' object has no attribute 'filename'\r\n```\r\n## Versions\r\nPaste the output of the following code:\r\n```python\r\nimport datasets\r\nimport sys\r\nimport platform\r\n\r\nprint(f\"\"\"\r\n- Datasets: {datasets.__version__}\r\n- Python: {sys.version}\r\n- Platform: {platform.platform()}\r\n\"\"\")\r\n```\r\n```\r\n- Datasets: 1.5.0\r\n- Python: 3.6.9 (default, Jan 26 2021, 15:33:00) [GCC 8.4.0]\r\n- Platform: 
Linux-5.4.0-1046-azure-x86_64-with-Ubuntu-18.04-bionic\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2239\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2239\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2238","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2238\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2238\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2238\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2238","id":861518291,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE4MTY5NzM5","number":2238,"title":"NLU evaluation data","user":{"login":"dkajtoch","id":32985207,"node_id":"MDQ6VXNlcjMyOTg1MjA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32985207?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dkajtoch","html_url":"https:\/\/github.com\/dkajtoch","followers_url":"https:\/\/api.github.com\/users\/dkajtoch\/followers","following_url":"https:\/\/api.github.com\/users\/dkajtoch\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dkajtoch\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dkajtoch\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dkajtoch\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dkajtoch\/orgs","repos_url":"https:\/\/api.github.com\/users\/dkajtoch\/repos","events_url":"https:\/\/api.github.com\/users\/dkajtoch\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dkajtoch\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-19T16:47:20Z","updated_at":"2021-04-23T15:32:05Z","closed_at":"2021-04-23T15:32:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2238","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2238","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2238.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2238.patch","merged_at":"2021-04-23T15:32:05Z"},"body":"New intent classification dataset from https:\/\/github.com\/xliuhw\/NLU-Evaluation-Data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2238\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2238\/timeline","performed_via_github_app":null} 
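A hedged sketch of the manual-download flow discussed in PR #2240 and issue #2239 above: the two CSV files are downloaded by hand and `data_dir` points `load_dataset` at them. The local directory name is an example; the "all" config name comes from the error message quoted in #2239.

```python
# Assumes wikihowAll.csv and wikihowSep.csv were manually downloaded into ./wikihow,
# as required by the dataset's manual download instructions.
from datasets import load_dataset

dataset = load_dataset("wikihow", "all", data_dir="./wikihow")
print(dataset["train"][0])
```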
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2237","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2237\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2237\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2237\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2237","id":861427439,"node_id":"MDU6SXNzdWU4NjE0Mjc0Mzk=","number":2237,"title":"Update Dataset.dataset_size after transformed with map","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-19T15:19:38Z","updated_at":"2021-04-20T14:22:05Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"After loading a dataset, if we transform it by using `.map` its `dataset_size` attirbute is not updated.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2237\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2237\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2236","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2236\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2236\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2236\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2236","id":861388145,"node_id":"MDU6SXNzdWU4NjEzODgxNDU=","number":2236,"title":"Request to add StrategyQA 
dataset","user":{"login":"sarahwie","id":8027676,"node_id":"MDQ6VXNlcjgwMjc2NzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8027676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sarahwie","html_url":"https:\/\/github.com\/sarahwie","followers_url":"https:\/\/api.github.com\/users\/sarahwie\/followers","following_url":"https:\/\/api.github.com\/users\/sarahwie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sarahwie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sarahwie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sarahwie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sarahwie\/orgs","repos_url":"https:\/\/api.github.com\/users\/sarahwie\/repos","events_url":"https:\/\/api.github.com\/users\/sarahwie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sarahwie\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-19T14:46:26Z","updated_at":"2021-04-19T14:46:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Request to add StrategyQA dataset\r\n- **Name:** StrategyQA\r\n- **Description:** open-domain QA [(project page)](https:\/\/allenai.org\/data\/strategyqa)\r\n- **Paper:** [url](https:\/\/arxiv.org\/pdf\/2101.02235.pdf)\r\n- **Data:** [here](https:\/\/allenai.org\/data\/strategyqa)\r\n- **Motivation:** uniquely-formulated dataset that also includes a question-decomposition breakdown and associated Wikipedia annotations for each step. 
Good for multi-hop reasoning modeling.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2236\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2236\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2235","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2235\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2235\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2235\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2235","id":861040716,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE3Nzc0NDUw","number":2235,"title":"Update README.md","user":{"login":"PierreColombo","id":22492839,"node_id":"MDQ6VXNlcjIyNDkyODM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22492839?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PierreColombo","html_url":"https:\/\/github.com\/PierreColombo","followers_url":"https:\/\/api.github.com\/users\/PierreColombo\/followers","following_url":"https:\/\/api.github.com\/users\/PierreColombo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PierreColombo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PierreColombo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PierreColombo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PierreColombo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PierreColombo\/repos","events_url":"https:\/\/api.github.com\/users\/PierreColombo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PierreColombo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-19T08:21:02Z","updated_at":"2021-04-19T12:49:19Z","closed_at":"2021-04-19T12:49:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2235","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2235","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2235.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2235.patch","merged_at":"2021-04-19T12:49:19Z"},"body":"Adding relevant citations (paper accepted at AAAI 2020 & EMNLP 2020) to the benchmark","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2235\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2235\/timeline","performed_via_github_app":null} 
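A small sketch illustrating the behaviour reported in issue #2237 above: `Dataset.dataset_size` is taken from the original `DatasetInfo` and is not recomputed after `.map`. The GLUE/MRPC dataset and the mapping function are arbitrary examples chosen here, not part of the issue.

```python
# Observe dataset_size before and after a map transform (issue #2237).
from datasets import load_dataset

ds = load_dataset("glue", "mrpc", split="train")
print(ds.dataset_size)   # size recorded when the dataset was prepared

ds2 = ds.map(lambda ex: {"sentence1": ex["sentence1"].lower()})
print(ds2.dataset_size)  # reportedly unchanged, even though the underlying data was rewritten
```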
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2234","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2234\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2234\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2234\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2234","id":860442246,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE3MzI4NDU3","number":2234,"title":"Fix bash snippet formatting in ADD_NEW_DATASET.md","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-17T16:01:08Z","updated_at":"2021-04-19T10:57:31Z","closed_at":"2021-04-19T07:51:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2234","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2234","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2234.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2234.patch","merged_at":"2021-04-19T07:51:36Z"},"body":"This PR indents the paragraphs around the bash snippets in ADD_NEW_DATASET.md to fix formatting.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2234\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2234\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2233","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2233\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2233\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2233\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2233","id":860097084,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE3MDYwMTkw","number":2233,"title":"Fix `xnli` dataset tuple 
key","user":{"login":"NikhilBartwal","id":42388668,"node_id":"MDQ6VXNlcjQyMzg4NjY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42388668?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NikhilBartwal","html_url":"https:\/\/github.com\/NikhilBartwal","followers_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/followers","following_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/orgs","repos_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/repos","events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-16T19:12:42Z","updated_at":"2021-04-19T08:56:42Z","closed_at":"2021-04-19T08:56:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2233","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2233","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2233.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2233.patch","merged_at":"2021-04-19T08:56:42Z"},"body":"Closes #2229 \r\nThe `xnli` dataset yields a tuple key in case of `ar` which is inconsistant with the acceptable key types (str\/int).\r\nThe key was thus ported to `str` keeping the original information intact.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2233\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2233\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2232","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2232\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2232\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2232\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2232","id":860075931,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE3MDQyNTI4","number":2232,"title":"Start filling GLUE dataset 
card","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-16T18:37:37Z","updated_at":"2021-04-21T09:33:09Z","closed_at":"2021-04-21T09:33:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2232","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2232","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2232.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2232.patch","merged_at":"2021-04-21T09:33:08Z"},"body":"The dataset card was pretty much empty.\r\n\r\nI added the descriptions (mainly from TFDS since the script is the same), and I also added the tasks tags as well as examples for a subset of the tasks.\r\n\r\ncc @sgugger ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2232\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2232\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2231","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2231\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2231\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2231\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2231","id":859850488,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE2ODYyNTEx","number":2231,"title":"Fix map when removing columns on a formatted 
dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-16T14:08:55Z","updated_at":"2021-04-16T15:10:05Z","closed_at":"2021-04-16T15:10:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2231","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2231","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2231.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2231.patch","merged_at":"2021-04-16T15:10:04Z"},"body":"This should fix issue #2226\r\n\r\nThe `remove_columns` argument was ignored on formatted datasets","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2231\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2231\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2230","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2230\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2230\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2230\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2230","id":859817159,"node_id":"MDU6SXNzdWU4NTk4MTcxNTk=","number":2230,"title":"Keys yielded while generating dataset are not being 
checked","user":{"login":"NikhilBartwal","id":42388668,"node_id":"MDQ6VXNlcjQyMzg4NjY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42388668?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NikhilBartwal","html_url":"https:\/\/github.com\/NikhilBartwal","followers_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/followers","following_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/orgs","repos_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/repos","events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-04-16T13:29:47Z","updated_at":"2021-05-10T17:31:21Z","closed_at":"2021-05-10T17:31:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The keys used in the dataset generation script to ensure the same order is generated on every user's end should be checked for their types (i.e either `str` or `int`) as well as whether they are unique or not.\r\nCurrently, the keys are not being checked for any of these, as evident from `xnli' dataset generation:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/56346791aed417306d054d89bd693d6b7eab17f7\/datasets\/xnli\/xnli.py#L196\r\nEven after having a tuple as key, the dataset is generated without any warning.\r\n\r\nAlso, as tested in the case of `anli` dataset (I tweeked the dataset script to use `1` as a key for every example):\r\n```\r\n>>> import datasets\r\n>>> nik = datasets.load_dataset('anli')\r\nDownloading and preparing dataset anli\/plain_text (download: 17.76 MiB, generated: 73.55 MiB, post-processed: Unknown size, total: 91.31 MiB) to C:\\Users\\nikhil\\.cache\\huggingface\\datasets\\anli\\plain_text\\0.1.0\\43fa2c99c10bf8478f1fa0860f7b122c6b277c4c41306255b7641257cf4e3299...\r\n0 examples [00:00, ? examples\/s]1 {'uid': '0fd0abfb-659e-4453-b196-c3a64d2d8267', 'premise': 'The Parma trolleybus system (Italian: \"Rete filoviaria di Parma\" ) forms part of the public transport network of the city and \"comune\" of Parma, in the region of Emilia-Romagna, northern Italy. In operation since 1953, the system presently comprises four urban routes.', 'hypothesis': 'The trolleybus system has over 2 urban routes', 'label': 'entailment', 'reason': ''}\r\n2021-04-16 12:38:14.483968: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:49] Successfully opened dynamic library cudart64_110.dll\r\n1 examples [00:01, 1.87s\/ examples]1 {'uid': '7ed72ff4-40b7-4f8a-b1b9-6c612aa62c84', 'premise': 'Alexandra Lendon Bastedo (9 March 1946 \u2013 12 January 2014) was a British actress, best known for her role as secret agent Sharron Macready in the 1968 British espionage\/science fiction adventure series \"The Champions\". 
She has been cited as a sex symbol of the 1960s and 1970s. Bastedo was a vegetarian and animal welfare advocate.', 'hypothesis': \"Sharron Macready was a popular character through the 1980's.\", 'label': 'neutral', 'reason': ''}\r\n1 {'uid': '5d2930a3-62ac-485d-94d7-4e36cbbcd7b5', 'premise': 'Alexandra Lendon Bastedo (9 March 1946 \u2013 12 January 2014) was a British actress, best known for her role as secret agent Sharron Macready in the 1968 British espionage\/science fiction adventure series \"The Champions\". She has been cited as a sex symbol of the 1960s and 1970s. Bastedo was a vegetarian and animal welfare advocate.', 'hypothesis': \"Bastedo didn't keep any pets because of her views on animal rights.\", 'label': 'neutral', 'reason': ''}\r\n1 {'uid': '324db753-ddc9-4a85-a825-f09e2e5aebdd', 'premise': 'Alexandra Lendon Bastedo (9 March 1946 \u2013 12 January 2014) was a British actress, best known for her role as secret agent Sharron Macready in the 1968 British espionage\/science fiction adventure series \"The Champions\". She has been cited as a sex symbol of the 1960s and 1970s. Bastedo was a vegetarian and animal welfare advocate.', 'hypothesis': 'Alexandra Bastedo was named by her mother.', 'label': 'neutral', 'reason': ''}\r\n1 {'uid': '4874f429-da0e-406a-90c7-22240ff3ddf8', 'premise': 'Alexandra Lendon Bastedo (9 March 1946 \u2013 12 January 2014) was a British actress, best known for her role as secret agent Sharron Macready in the 1968 British espionage\/science fiction adventure series \"The Champions\". She has been cited as a sex symbol of the 1960s and 1970s. Bastedo was a vegetarian and animal welfare advocate.', 'hypothesis': 'Bastedo cared for all the animals that inhabit the earth.', 'label': 'neutral', 'reason': ''}\r\n```\r\nHere also, the dataset was generated successfuly even hough it had same keys without any warning.\r\n\r\nThe reason appears to stem from here:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/56346791aed417306d054d89bd693d6b7eab17f7\/src\/datasets\/builder.py#L988\r\nHere, although it has access to every key, but it is not being checked and the example is written directly:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/56346791aed417306d054d89bd693d6b7eab17f7\/src\/datasets\/builder.py#L992\r\n\r\nI would like to take this issue if you allow me. 
Thank You!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2230\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2230\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2229","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2229\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2229\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2229\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2229","id":859810602,"node_id":"MDU6SXNzdWU4NTk4MTA2MDI=","number":2229,"title":"`xnli` dataset creating a tuple key while yielding instead of `str` or `int`","user":{"login":"NikhilBartwal","id":42388668,"node_id":"MDQ6VXNlcjQyMzg4NjY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42388668?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NikhilBartwal","html_url":"https:\/\/github.com\/NikhilBartwal","followers_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/followers","following_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/orgs","repos_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/repos","events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-16T13:21:53Z","updated_at":"2021-04-19T08:56:42Z","closed_at":"2021-04-19T08:56:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When using `ds = datasets.load_dataset('xnli', 'ar')`, the dataset generation script uses the following section of code in the egging, which yields a tuple key instead of the specified `str` or `int` key:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/56346791aed417306d054d89bd693d6b7eab17f7\/datasets\/xnli\/xnli.py#L196\r\n\r\nSince, community datasets in Tensorflow Datasets also use HF datasets, this causes a Tuple key error while loading HF's `xnli` dataset. 
\r\nI'm up for sending a fix for this, I think we can simply use `file_idx + \"_\" + row_idx` as a unique key instead of a tuple.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2229\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2229\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2228","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2228\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2228\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2228\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2228","id":859795563,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE2ODE2MTQz","number":2228,"title":"[WIP] Add ArrayXD support for fixed size list.","user":{"login":"jblemoine","id":22685854,"node_id":"MDQ6VXNlcjIyNjg1ODU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22685854?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jblemoine","html_url":"https:\/\/github.com\/jblemoine","followers_url":"https:\/\/api.github.com\/users\/jblemoine\/followers","following_url":"https:\/\/api.github.com\/users\/jblemoine\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jblemoine\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jblemoine\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jblemoine\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jblemoine\/orgs","repos_url":"https:\/\/api.github.com\/users\/jblemoine\/repos","events_url":"https:\/\/api.github.com\/users\/jblemoine\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jblemoine\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-16T13:04:08Z","updated_at":"2021-04-19T13:02:18Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2228","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2228","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2228.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2228.patch","merged_at":null},"body":"Add support for fixed size list for ArrayXD when shape is known . See https:\/\/github.com\/huggingface\/datasets\/issues\/2146\r\nSince offset are not stored anymore, the file size is now roughly equal to the actual data size. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2228\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2228\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2227","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2227\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2227\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2227\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2227","id":859771526,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE2Nzk1NjMx","number":2227,"title":"Use update_metadata_with_features decorator in class_encode_column method","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-16T12:31:41Z","updated_at":"2021-04-16T13:49:40Z","closed_at":"2021-04-16T13:49:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2227","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2227","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2227.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2227.patch","merged_at":"2021-04-16T13:49:39Z"},"body":"Following @mariosasko 's comment","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2227\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2227\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2226","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2226\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2226\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2226\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2226","id":859720302,"node_id":"MDU6SXNzdWU4NTk3MjAzMDI=","number":2226,"title":"Batched map fails when removing all columns","user":{"login":"villmow","id":2743060,"node_id":"MDQ6VXNlcjI3NDMwNjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2743060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/villmow","html_url":"https:\/\/github.com\/villmow","followers_url":"https:\/\/api.github.com\/users\/villmow\/followers","following_url":"https:\/\/api.github.com\/users\/villmow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/villmow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/villmow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/villmow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/villmow\/orgs","repos_url":"https:\/\/api.github.com\/users\/villmow\/repos","events_url":"https:\/\/api.github.com\/users\/villmow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/villmow\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-04-16T11:17:01Z","updated_at":"2021-04-16T15:10:41Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi @lhoestq ,\r\n\r\nI'm hijacking this issue, because I'm currently trying to do the approach you recommend:\r\n\r\n> Currently the optimal setup for single-column computations is probably to do something like\r\n> \r\n> ```python\r\n> result = dataset.map(f, input_columns=\"my_col\", remove_columns=dataset.column_names)\r\n> ```\r\n\r\nHere is my code: (see edit, in which I added a simplified version\r\n\r\n```\r\nThis is the error:\r\n```bash\r\npyarrow.lib.ArrowInvalid: Column 1 named tokens expected length 8964 but got length 1000\r\n```\r\nI wonder why this error occurs, when I delete every column? Can you give me a hint?\r\n\r\n### Edit:\r\nI preprocessed my dataset before (using map with the features argument) and saved it to disk. May this be part of the error? I can iterate over the\r\ncomplete dataset and print every sample before calling map. 
There seems to be no other problem with the dataset.\r\n\r\nI tried to simplify the code that crashes:\r\n\r\n```python\r\n# works\r\nlog.debug(dataset.column_names)\r\nlog.debug(dataset)\r\nfor i, sample in enumerate(dataset):\r\n log.debug(i, sample)\r\n\r\n# crashes\r\ncounted_dataset = dataset.map(\r\n lambda x: {\"a\": list(range(20))},\r\n input_columns=column,\r\n remove_columns=dataset.column_names,\r\n load_from_cache_file=False,\r\n num_proc=num_workers,\r\n batched=True,\r\n)\r\n```\r\n\r\n```\r\npyarrow.lib.ArrowInvalid: Column 1 named tokens expected length 20 but got length 1000\r\n```\r\n\r\nEdit2: \r\n\r\nMay this be a problem with a schema I set when preprocessing the dataset before? I tried to add the `features` argument to the function and then I get a new error:\r\n\r\n```python\r\n# crashes\r\ncounted_dataset = dataset.map(\r\n lambda x: {\"a\": list(range(20))},\r\n input_columns=column,\r\n remove_columns=dataset.column_names,\r\n load_from_cache_file=False,\r\n num_proc=num_workers,\r\n batched=True,\r\n features=datasets.Features(\r\n {\r\n \"a\": datasets.Sequence(datasets.Value(\"int32\"))\r\n }\r\n )\r\n)\r\n```\r\n\r\n```\r\n File \"env\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 1704, in _map_single\r\n writer.write_batch(batch)\r\n File \"env\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py\", line 312, in write_batch\r\n col_type = schema.field(col).type if schema is not None else None\r\n File \"pyarrow\/types.pxi\", line 1341, in pyarrow.lib.Schema.field\r\nKeyError: 'Column tokens does not exist in schema'\r\n```\r\n\r\n_Originally posted by @villmow in https:\/\/github.com\/huggingface\/datasets\/issues\/2193#issuecomment-820230874_","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2226\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2226\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2225","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2225\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2225\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2225\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2225","id":858469561,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE1NzAzMTY4","number":2225,"title":"fixed one instance of 'train' to 
'test'","user":{"login":"alexwdong","id":46733535,"node_id":"MDQ6VXNlcjQ2NzMzNTM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46733535?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexwdong","html_url":"https:\/\/github.com\/alexwdong","followers_url":"https:\/\/api.github.com\/users\/alexwdong\/followers","following_url":"https:\/\/api.github.com\/users\/alexwdong\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexwdong\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexwdong\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexwdong\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexwdong\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexwdong\/repos","events_url":"https:\/\/api.github.com\/users\/alexwdong\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexwdong\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-15T04:26:40Z","updated_at":"2021-04-15T22:09:50Z","closed_at":"2021-04-15T21:19:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2225","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2225","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2225.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2225.patch","merged_at":"2021-04-15T21:19:09Z"},"body":"I believe this should be 'test' instead of 'train'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2225\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2225\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2224","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2224\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2224\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2224\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2224","id":857983361,"node_id":"MDU6SXNzdWU4NTc5ODMzNjE=","number":2224,"title":"Raise error if Windows max path length is not 
disabled","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-14T14:57:20Z","updated_at":"2021-04-14T14:59:13Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"On startup, raise an error if Windows max path length is not disabled; ask the user to disable it.\r\n\r\nLinked to discussion in #2220.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2224\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2224\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2223","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2223\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2223\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2223\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2223","id":857870800,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE1MjE4MDIz","number":2223,"title":"Set test cache 
config","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-04-14T12:55:24Z","updated_at":"2021-04-15T19:11:25Z","closed_at":"2021-04-15T19:11:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2223","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2223","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2223.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2223.patch","merged_at":"2021-04-15T19:11:25Z"},"body":"Currently, running the tests populates the default cache directory `\"~\/.cache\"`.\r\n\r\nThis PR monkey-patches the config to set the cache directory within the temporary test directory, avoiding side effects.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2223\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2223\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2222","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2222\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2222\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2222\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2222","id":857847231,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE1MTk5MTM5","number":2222,"title":"Fix too long WindowsFileLock 
name","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892913,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEz","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/wontfix","name":"wontfix","color":"ffffff","default":true,"description":"This will not be worked on"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-14T12:26:52Z","updated_at":"2021-04-14T15:00:25Z","closed_at":"2021-04-14T14:46:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2222","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2222","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2222.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2222.patch","merged_at":null},"body":"Fix WindowsFileLock name longer than allowed MAX_PATH by shortening the basename.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2222\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2222\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2221","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2221\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2221\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2221\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2221","id":857833770,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE1MTg4MTE5","number":2221,"title":"Add SLR70 - SLR80 and SLR86 to OpenSLR 
dataset","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-14T12:09:18Z","updated_at":"2021-04-14T13:50:19Z","closed_at":"2021-04-14T13:50:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2221","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2221","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2221.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2221.patch","merged_at":"2021-04-14T13:50:19Z"},"body":"I would like to add SLR70, SLR71, SLR72, SLR73, SLR74, SLR75, SLR76, SLR77, SLR78, SLR79, SLR80 and SLR86 to OpenSLR dataset. 
The languages are:\r\nNigerian English, Chilean Spanish, Columbian Spanish, Peruvian Spanish, Puerto Rico Spanish, Venezuelan Spanish, Basque, Galician, Gujarati and Kannada.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2221\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2221\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2220","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2220\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2220\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2220\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2220","id":857774626,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE1MTM4NDQz","number":2220,"title":"Fix infinite loop in WindowsFileLock","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892913,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEz","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/wontfix","name":"wontfix","color":"ffffff","default":true,"description":"This will not be worked on"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-14T10:49:58Z","updated_at":"2021-04-14T14:59:50Z","closed_at":"2021-04-14T14:59:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2220","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2220","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2220.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2220.patch","merged_at":null},"body":"Raise exception to avoid infinite loop.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2220\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2220\/timeline","performed_via_github_app":null} 
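The failure reported in #2226 above comes from a batched `map` whose function returns a fixed-length list (20 items) for a 1000-example batch; in batched mode every returned column must have one entry per input example. A small self-contained sketch of the recommended single-column pattern, using toy data rather than the reporter's dataset:

```python
# Illustration for the batched-map discussion in #2226/#2231 (not library code):
# the returned column is built from the input batch, so its length always
# matches the number of examples in the batch.
from datasets import Dataset

ds = Dataset.from_dict({"tokens": [["a", "b"], ["c"], ["d", "e", "f"]]})

counted = ds.map(
    lambda batch: {"n_tokens": [len(t) for t in batch["tokens"]]},
    batched=True,
    remove_columns=ds.column_names,  # drop the original columns (see #2231)
)
print(counted["n_tokens"])  # [2, 1, 3]
```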
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2219","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2219\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2219\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2219\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2219","id":857321242,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE0NzYxMzA3","number":2219,"title":"Added CUAD dataset","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-13T21:05:03Z","updated_at":"2021-04-24T14:25:51Z","closed_at":"2021-04-16T08:50:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2219","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2219","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2219.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2219.patch","merged_at":"2021-04-16T08:50:44Z"},"body":"Dataset link : https:\/\/github.com\/TheAtticusProject\/cuad\/\r\n\r\nWorking on README.md currently.\r\n\r\nCloses #2084 and [#1](https:\/\/github.com\/TheAtticusProject\/cuad\/issues\/1). 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2219\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2219\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2218","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2218\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2218\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2218\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2218","id":857238435,"node_id":"MDU6SXNzdWU4NTcyMzg0MzU=","number":2218,"title":"Duplicates in the LAMA dataset","user":{"login":"amarasovic","id":7276193,"node_id":"MDQ6VXNlcjcyNzYxOTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7276193?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/amarasovic","html_url":"https:\/\/github.com\/amarasovic","followers_url":"https:\/\/api.github.com\/users\/amarasovic\/followers","following_url":"https:\/\/api.github.com\/users\/amarasovic\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/amarasovic\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/amarasovic\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/amarasovic\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/amarasovic\/orgs","repos_url":"https:\/\/api.github.com\/users\/amarasovic\/repos","events_url":"https:\/\/api.github.com\/users\/amarasovic\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/amarasovic\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-13T18:59:49Z","updated_at":"2021-04-14T21:42:27Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I observed duplicates in the LAMA probing dataset, see a minimal code below. 
\r\n\r\n```\r\n>>> import datasets\r\n>>> dataset = datasets.load_dataset('lama')\r\nNo config specified, defaulting to: lama\/trex\r\nReusing dataset lama (\/home\/anam\/.cache\/huggingface\/datasets\/lama\/trex\/1.1.0\/97deffae13eca0a18e77dfb3960bb31741e973586f5c1fe1ec0d6b5eece7bddc)\r\n>>> train_dataset = dataset['train']\r\n>>> train_dataset[0]\r\n{'description': 'language or languages a person has learned from early childhood', 'label': 'native language', 'masked_sentence': 'Louis Jules Trochu ([lwi \u0292yl t\u0281\u0254\u0283y]; 12 March 1815 \u2013 7 October 1896) was a [MASK] military leader and politician.', 'obj_label': 'French', 'obj_surface': 'French', 'obj_uri': 'Q150', 'predicate_id': 'P103', 'sub_label': 'Louis Jules Trochu', 'sub_surface': 'Louis Jules Trochu', 'sub_uri': 'Q441235', 'template': 'The native language of [X] is [Y] .', 'template_negated': '[X] is not owned by [Y] .', 'type': 'N-1', 'uuid': '40b2ed1c-0961-482e-844e-32596b6117c8'}\r\n>>> train_dataset[1]\r\n{'description': 'language or languages a person has learned from early childhood', 'label': 'native language', 'masked_sentence': 'Louis Jules Trochu ([lwi \u0292yl t\u0281\u0254\u0283y]; 12 March 1815 \u2013 7 October 1896) was a [MASK] military leader and politician.', 'obj_label': 'French', 'obj_surface': 'French', 'obj_uri': 'Q150', 'predicate_id': 'P103', 'sub_label': 'Louis Jules Trochu', 'sub_surface': 'Louis Jules Trochu', 'sub_uri': 'Q441235', 'template': 'The native language of [X] is [Y] .', 'template_negated': '[X] is not owned by [Y] .', 'type': 'N-1', 'uuid': '40b2ed1c-0961-482e-844e-32596b6117c8'}\r\n```\r\n\r\nI checked the original data available at https:\/\/dl.fbaipublicfiles.com\/LAMA\/data.zip. This particular duplicated comes from:\r\n```\r\n{\"uuid\": \"40b2ed1c-0961-482e-844e-32596b6117c8\", \"obj_uri\": \"Q150\", \"obj_label\": \"French\", \"sub_uri\": \"Q441235\", \"sub_label\": \"Louis Jules Trochu\", \"predicate_id\": \"P103\", \"evidences\": [{\"sub_surface\": \"Louis Jules Trochu\", \"obj_surface\": \"French\", \"masked_sentence\": \"Louis Jules Trochu ([lwi \\u0292yl t\\u0281\\u0254\\u0283y]; 12 March 1815 \\u2013 7 October 1896) was a [MASK] military leader and politician.\"}, {\"sub_surface\": \"Louis Jules Trochu\", \"obj_surface\": \"French\", \"masked_sentence\": \"Louis Jules Trochu ([lwi \\u0292yl t\\u0281\\u0254\\u0283y]; 12 March 1815 \\u2013 7 October 1896) was a [MASK] military leader and politician.\"}]}\r\n``` \r\n\r\nWhat is the best way to deal with these duplicates if I want to use `datasets` to probe with LAMA? 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2218\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2218\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2217","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2217\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2217\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2217\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2217","id":857011314,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE0NTAxNjIz","number":2217,"title":"Revert breaking change in cache_files property","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-13T14:20:04Z","updated_at":"2021-04-14T14:24:24Z","closed_at":"2021-04-14T14:24:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2217","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2217","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2217.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2217.patch","merged_at":"2021-04-14T14:24:23Z"},"body":"#2025 changed the format of `Dataset.cache_files`.\r\nBefore it was formatted like\r\n```python\r\n[{\"filename\": \"path\/to\/file.arrow\", \"start\": 0, \"end\": 1337}]\r\n```\r\nand it was changed to\r\n```python\r\n[\"path\/to\/file.arrow\"]\r\n```\r\nsince there's no start\/end offsets available anymore.\r\n\r\nTo make this less breaking, I'm setting the format back to a list of dicts:\r\n```python\r\n[{\"filename\": \"path\/to\/file.arrow\"}]\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2217\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2217\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2216","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2216\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2216\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2216\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2216","id":856955534,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE0NDU0MjE1","number":2216,"title":"added real label for glue\/mrpc to test set","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-13T13:20:20Z","updated_at":"2021-04-13T13:53:20Z","closed_at":"2021-04-13T13:53:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2216","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2216","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2216.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2216.patch","merged_at":"2021-04-13T13:53:19Z"},"body":"Added real label to `glue.py` `mrpc` task for test split.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2216\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2216\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2215","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2215\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2215\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2215\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2215","id":856716791,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE0MjUyNTEy","number":2215,"title":"Add datasets SLR35 and SLR36 to OpenSLR 
","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-13T08:24:07Z","updated_at":"2021-04-13T14:05:14Z","closed_at":"2021-04-13T14:05:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2215","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2215","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2215.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2215.patch","merged_at":"2021-04-13T14:05:14Z"},"body":"I would like to add [SLR35](https:\/\/openslr.org\/35\/) (18GB) and [SLR36](https:\/\/openslr.org\/36\/) (22GB) which are Large Javanese and Sundanese ASR training data set collected by Google in collaboration with Reykjavik University and Universitas Gadjah Mada in Indonesia.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2215\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2215\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2214","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2214\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2214\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2214\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2214","id":856333657,"node_id":"MDU6SXNzdWU4NTYzMzM2NTc=","number":2214,"title":"load_metric error: module 'datasets.utils.file_utils' has no attribute 
'add_start_docstrings'","user":{"login":"nsaphra","id":414788,"node_id":"MDQ6VXNlcjQxNDc4OA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/414788?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nsaphra","html_url":"https:\/\/github.com\/nsaphra","followers_url":"https:\/\/api.github.com\/users\/nsaphra\/followers","following_url":"https:\/\/api.github.com\/users\/nsaphra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nsaphra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nsaphra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nsaphra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nsaphra\/orgs","repos_url":"https:\/\/api.github.com\/users\/nsaphra\/repos","events_url":"https:\/\/api.github.com\/users\/nsaphra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nsaphra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-12T20:26:01Z","updated_at":"2021-04-23T15:20:02Z","closed_at":"2021-04-23T15:20:02Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm having the same problem as [Notebooks issue 10](https:\/\/github.com\/huggingface\/notebooks\/issues\/10) on datasets 1.2.1, and it seems to be an issue with the datasets package.\r\n\r\n```python\r\n>>> from datasets import load_metric\r\n>>> metric = load_metric(\"glue\", \"sst2\")\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/ext3\/miniconda3\/lib\/python3.8\/site-packages\/datasets-1.2.1-py3.8.egg\/datasets\/load.py\", line 502, in load_metric\r\n File \"\/ext3\/miniconda3\/lib\/python3.8\/site-packages\/datasets-1.2.1-py3.8.egg\/datasets\/load.py\", line 66, in import_main_class\r\n File \"\/ext3\/miniconda3\/lib\/python3.8\/importlib\/__init__.py\", line 127, in import_module\r\n return _bootstrap._gcd_import(name[level:], package, level)\r\n File \"\", line 1014, in _gcd_import\r\n File \"\", line 991, in _find_and_load\r\n File \"\", line 975, in _find_and_load_unlocked\r\n File \"\", line 671, in _load_unlocked\r\n File \"\", line 783, in exec_module\r\n File \"\", line 219, in _call_with_frames_removed\r\n File \"\/home\/ns4008\/.cache\/huggingface\/modules\/datasets_modules\/metrics\/glue\/e4606ab9804a36bcd5a9cebb2cb65bb14b6ac78ee9e6d5981fa679a495dd55de\/glue.py\", line 105, in \r\n @datasets.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)\r\nAttributeError: module 'datasets.utils.file_utils' has no attribute 'add_start_docstrings'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2214\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2214\/timeline","performed_via_github_app":null} 
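A hypothetical debugging sketch for the `add_start_docstrings` error reported above (#2214): it only checks whether the installed `datasets` build exposes the attribute that the cached `glue.py` metric script expects; the version-mismatch angle is an assumption, not a confirmed diagnosis from the thread.

```python
# Hypothetical check for #2214 (not from the issue thread): verify that the
# installed datasets build actually exposes the helper used by the cached
# glue.py metric script.
import datasets
import datasets.utils.file_utils as file_utils

print(datasets.__version__)                         # e.g. "1.2.1"
print(hasattr(file_utils, "add_start_docstrings"))  # False reproduces the AttributeError
```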
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2213","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2213\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2213\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2213\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2213","id":856025320,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEzNjcwODk2","number":2213,"title":"Fix lc_quad download checksum","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-12T14:16:59Z","updated_at":"2021-04-14T22:04:54Z","closed_at":"2021-04-14T13:42:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2213","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2213","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2213.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2213.patch","merged_at":"2021-04-14T13:42:25Z"},"body":"Fixes #2211 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2213\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2213\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2212","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2212\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2212\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2212\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2212","id":855999133,"node_id":"MDU6SXNzdWU4NTU5OTkxMzM=","number":2212,"title":"Can't reach \"https:\/\/storage.googleapis.com\/illuin\/fquad\/train.json.zip\" when trying to load fquad 
dataset","user":{"login":"hanss0n","id":21348833,"node_id":"MDQ6VXNlcjIxMzQ4ODMz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21348833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hanss0n","html_url":"https:\/\/github.com\/hanss0n","followers_url":"https:\/\/api.github.com\/users\/hanss0n\/followers","following_url":"https:\/\/api.github.com\/users\/hanss0n\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hanss0n\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hanss0n\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hanss0n\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hanss0n\/orgs","repos_url":"https:\/\/api.github.com\/users\/hanss0n\/repos","events_url":"https:\/\/api.github.com\/users\/hanss0n\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hanss0n\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-12T13:49:56Z","updated_at":"2021-05-17T22:17:06Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to load the [fquad dataset](https:\/\/huggingface.co\/datasets\/fquad) by running: \r\n\r\n```Python\r\nfquad = load_dataset(\"fquad\")\r\n```\r\n\r\nwhich produces the following error:\r\n\r\n```\r\nUsing custom data configuration default\r\n\r\nDownloading and preparing dataset fquad\/default (download: 3.14 MiB, generated: 6.62 MiB, post-processed: Unknown size, total: 9.76 MiB) to \/root\/.cache\/huggingface\/datasets\/fquad\/default\/0.1.0\/778dc2c85813d05ddd0c17087294d5f8f24820752340958070876b677af9f061...\r\n\r\n---------------------------------------------------------------------------\r\n\r\nConnectionError Traceback (most recent call last)\r\n\r\n in ()\r\n----> 1 fquad = load_dataset(\"fquad\")\r\n\r\n11 frames\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag, max_retries, use_auth_token)\r\n 614 raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\n 615 _raise_if_offline_mode_is_enabled(f\"Tried to reach {url}\")\r\n--> 616 raise ConnectionError(\"Couldn't reach {}\".format(url))\r\n 617 \r\n 618 # Try a second time\r\n\r\nConnectionError: Couldn't reach https:\/\/storage.googleapis.com\/illuin\/fquad\/train.json.zip\r\n```\r\n\r\nDoes anyone know why that is and how to fix it? 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2212\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2212\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2211","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2211\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2211\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2211\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2211","id":855988410,"node_id":"MDU6SXNzdWU4NTU5ODg0MTA=","number":2211,"title":"Getting checksum error when trying to load lc_quad dataset","user":{"login":"hanss0n","id":21348833,"node_id":"MDQ6VXNlcjIxMzQ4ODMz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21348833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hanss0n","html_url":"https:\/\/github.com\/hanss0n","followers_url":"https:\/\/api.github.com\/users\/hanss0n\/followers","following_url":"https:\/\/api.github.com\/users\/hanss0n\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hanss0n\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hanss0n\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hanss0n\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hanss0n\/orgs","repos_url":"https:\/\/api.github.com\/users\/hanss0n\/repos","events_url":"https:\/\/api.github.com\/users\/hanss0n\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hanss0n\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-12T13:38:58Z","updated_at":"2021-04-14T13:42:25Z","closed_at":"2021-04-14T13:42:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm having issues loading the [lc_quad](https:\/\/huggingface.co\/datasets\/fquad) dataset by running:\r\n\r\n```Python\r\nlc_quad = load_dataset(\"lc_quad\")\r\n```\r\n\r\nwhich is giving me the following error:\r\n\r\n``` \r\nUsing custom data configuration default\r\n\r\nDownloading and preparing dataset lc_quad\/default (download: 3.69 MiB, generated: 19.77 MiB, post-processed: Unknown size, total: 23.46 MiB) to \/root\/.cache\/huggingface\/datasets\/lc_quad\/default\/2.0.0\/5a98fe174603f5dec6df07edf1c2b4d2317210d2ad61f5a393839bca4d64e5a7...\r\n\r\n---------------------------------------------------------------------------\r\n\r\nNonMatchingChecksumError Traceback (most recent call last)\r\n\r\n in ()\r\n----> 1 lc_quad = load_dataset(\"lc_quad\")\r\n\r\n3 frames\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 37 if len(bad_urls) > 0:\r\n 38 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 39 raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 40 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 41 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source 
files:\r\n['https:\/\/github.com\/AskNowQA\/LC-QuAD2.0\/archive\/master.zip']\r\n```\r\n\r\nDoes anyone know why this could be and how I can fix it? ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2211\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2211\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2210","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2210\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2210\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2210\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2210","id":855709400,"node_id":"MDU6SXNzdWU4NTU3MDk0MDA=","number":2210,"title":"dataloading slow when using HUGE dataset","user":{"login":"hwijeen","id":29157715,"node_id":"MDQ6VXNlcjI5MTU3NzE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29157715?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hwijeen","html_url":"https:\/\/github.com\/hwijeen","followers_url":"https:\/\/api.github.com\/users\/hwijeen\/followers","following_url":"https:\/\/api.github.com\/users\/hwijeen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hwijeen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hwijeen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hwijeen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hwijeen\/orgs","repos_url":"https:\/\/api.github.com\/users\/hwijeen\/repos","events_url":"https:\/\/api.github.com\/users\/hwijeen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hwijeen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-12T08:33:02Z","updated_at":"2021-04-13T02:03:05Z","closed_at":"2021-04-13T02:03:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nWhen I use datasets with 600GB data, the dataloading speed decreases significantly. \r\nI am experimenting with two datasets, and one is about 60GB and the other 600GB.\r\nSimply speaking, my code uses the `datasets.set_format(\"torch\")` function and lets pytorch-lightning handle ddp training.\r\nWhen looking at the profiler output pytorch-lightning provides for the two runs, I see that fetching a batch (`get_train_batch`) consumes an unreasonable amount of time when data is large. 
What could be the cause?\r\n\r\n* 60GB data\r\n```\r\nAction \t| Mean duration (s)\t|Num calls \t| Total time (s) \t| Percentage % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nTotal \t| - \t|_ \t| 200.33 \t| 100 % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nrun_training_epoch \t| 71.994 \t|1 \t| 71.994 \t| 35.937 \t|\r\nrun_training_batch \t| 0.64373 \t|100 \t| 64.373 \t| 32.133 \t|\r\noptimizer_step_and_closure_0 \t| 0.64322 \t|100 \t| 64.322 \t| 32.108 \t|\r\ntraining_step_and_backward \t| 0.61004 \t|100 \t| 61.004 \t| 30.452 \t|\r\nmodel_backward \t| 0.37552 \t|100 \t| 37.552 \t| 18.745 \t|\r\nmodel_forward \t| 0.22813 \t|100 \t| 22.813 \t| 11.387 \t|\r\ntraining_step \t| 0.22759 \t|100 \t| 22.759 \t| 11.361 \t|\r\nget_train_batch \t| 0.066385 \t|100 \t| 6.6385 \t| 3.3138 \t|\r\n```\r\n\r\n* 600GB data\r\n```\r\nAction \t| Mean duration (s)\t|Num calls \t| Total time (s) \t| Percentage % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nTotal \t| - \t|_ \t| 3285.6 \t| 100 % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nrun_training_epoch \t| 1397.9 \t|1 \t| 1397.9 \t| 42.546 \t|\r\nrun_training_batch \t| 7.2596 \t|100 \t| 725.96 \t| 22.095 \t|\r\noptimizer_step_and_closure_0 \t| 7.2589 \t|100 \t| 725.89 \t| 22.093 \t|\r\ntraining_step_and_backward \t| 7.223 \t|100 \t| 722.3 \t| 21.984 \t|\r\nmodel_backward \t| 6.9662 \t|100 \t| 696.62 \t| 21.202 \t|\r\nget_train_batch \t| 6.322 \t|100 \t| 632.2 \t| 19.241 \t|\r\nmodel_forward \t| 0.24902 \t|100 \t| 24.902 \t| 0.75789 \t|\r\ntraining_step \t| 0.2485 \t|100 \t| 24.85 \t| 0.75633 \t|\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2210\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2210\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2209","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2209\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2209\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2209\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2209","id":855638232,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEzMzQwMTI2","number":2209,"title":"Add code of conduct to the 
project","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-12T07:16:14Z","updated_at":"2021-04-12T17:55:52Z","closed_at":"2021-04-12T17:55:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2209","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2209","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2209.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2209.patch","merged_at":"2021-04-12T17:55:52Z"},"body":"Add code of conduct to the project and link it from README and CONTRIBUTING.\r\n\r\nThis was already done in `transformers`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2209\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2209\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2208","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2208\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2208\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2208\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2208","id":855343835,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEzMTAxMzMw","number":2208,"title":"Remove Python2 
leftovers","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-11T16:08:03Z","updated_at":"2021-04-14T22:05:36Z","closed_at":"2021-04-14T13:40:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2208","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2208","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2208.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2208.patch","merged_at":"2021-04-14T13:40:50Z"},"body":"This PR removes Python2 leftovers since this project aims for Python3.6+ (and as of 2020 Python2 is no longer officially supported)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2208\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2208\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2207","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2207\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2207\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2207\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2207","id":855267383,"node_id":"MDU6SXNzdWU4NTUyNjczODM=","number":2207,"title":"making labels consistent across the 
datasets","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-11T10:03:56Z","updated_at":"2021-04-14T14:02:00Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nFor accessing the labels one can type \r\n```\r\n>>> a.features['label']\r\nClassLabel(num_classes=3, names=['entailment', 'neutral', 'contradiction'], names_file=None, id=None)\r\n```\r\nThe labels however are not consistent with the actual labels sometimes, for instance in case of XNLI, the actual labels are 0,1,2, but if one try to access as above they are entailment, neutral,contradiction,\r\nit would be great to have the labels consistent.\r\n\r\nthanks \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2207\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2207\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2206","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2206\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2206\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2206\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2206","id":855252415,"node_id":"MDU6SXNzdWU4NTUyNTI0MTU=","number":2206,"title":"Got pyarrow error when loading a dataset while adding special tokens into the 
tokenizer","user":{"login":"yana-xuyan","id":38536635,"node_id":"MDQ6VXNlcjM4NTM2NjM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38536635?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yana-xuyan","html_url":"https:\/\/github.com\/yana-xuyan","followers_url":"https:\/\/api.github.com\/users\/yana-xuyan\/followers","following_url":"https:\/\/api.github.com\/users\/yana-xuyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yana-xuyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yana-xuyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yana-xuyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yana-xuyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/yana-xuyan\/repos","events_url":"https:\/\/api.github.com\/users\/yana-xuyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yana-xuyan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-04-11T08:40:09Z","updated_at":"2021-11-10T12:18:30Z","closed_at":"2021-11-10T12:04:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I added five more special tokens into the GPT2 tokenizer. But after that, when I try to pre-process the data using my previous code, I got an error shown below:\r\n\r\nTraceback (most recent call last):\r\n File \"\/home\/xuyan\/anaconda3\/envs\/convqa\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1687, in _map_single\r\n writer.write(example)\r\n File \"\/home\/xuyan\/anaconda3\/envs\/convqa\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 296, in write\r\n self.write_on_file()\r\n File \"\/home\/xuyan\/anaconda3\/envs\/convqa\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 270, in write_on_file\r\n pa_array = pa.array(typed_sequence)\r\n File \"pyarrow\/array.pxi\", line 222, in pyarrow.lib.array\r\n File \"pyarrow\/array.pxi\", line 110, in pyarrow.lib._handle_arrow_array_protocol\r\n File \"\/home\/xuyan\/anaconda3\/envs\/convqa\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 108, in __arrow_array__\r\n out = out.cast(pa.list_(self.optimized_int_type))\r\n File \"pyarrow\/array.pxi\", line 810, in pyarrow.lib.Array.cast\r\n File \"\/home\/xuyan\/anaconda3\/envs\/convqa\/lib\/python3.7\/site-packages\/pyarrow\/compute.py\", line 281, in cast\r\n return call_function(\"cast\", [arr], options)\r\n File \"pyarrow\/_compute.pyx\", line 465, in pyarrow._compute.call_function\r\n File \"pyarrow\/_compute.pyx\", line 294, in pyarrow._compute.Function.call\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Integer value 50259 not in range: -128 to 127\r\n\r\nDo you have any idea about 
it?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2206\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2206\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2205","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2205\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2205\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2205\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2205","id":855207605,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEzMDAwMzYw","number":2205,"title":"Updating citation information on LinCE readme","user":{"login":"gaguilar","id":5833357,"node_id":"MDQ6VXNlcjU4MzMzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5833357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gaguilar","html_url":"https:\/\/github.com\/gaguilar","followers_url":"https:\/\/api.github.com\/users\/gaguilar\/followers","following_url":"https:\/\/api.github.com\/users\/gaguilar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gaguilar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gaguilar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gaguilar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gaguilar\/orgs","repos_url":"https:\/\/api.github.com\/users\/gaguilar\/repos","events_url":"https:\/\/api.github.com\/users\/gaguilar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gaguilar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-11T03:18:05Z","updated_at":"2021-04-12T17:53:34Z","closed_at":"2021-04-12T17:53:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2205","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2205","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2205.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2205.patch","merged_at":"2021-04-12T17:53:34Z"},"body":"Hi!\r\n\r\nI just updated the citation information in this PR. It had an additional bibtex from one of the datasets used in LinCE and then the LinCE bibtex. I removed the former and added a link that shows the full list of citations for each dataset. 
\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2205\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2205\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2204","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2204\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2204\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2204\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2204","id":855144431,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyOTU1MzM2","number":2204,"title":"Add configurable options to `seqeval` metric","user":{"login":"marrodion","id":44571847,"node_id":"MDQ6VXNlcjQ0NTcxODQ3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44571847?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/marrodion","html_url":"https:\/\/github.com\/marrodion","followers_url":"https:\/\/api.github.com\/users\/marrodion\/followers","following_url":"https:\/\/api.github.com\/users\/marrodion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/marrodion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/marrodion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/marrodion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/marrodion\/orgs","repos_url":"https:\/\/api.github.com\/users\/marrodion\/repos","events_url":"https:\/\/api.github.com\/users\/marrodion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/marrodion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-10T19:58:19Z","updated_at":"2021-04-15T13:49:46Z","closed_at":"2021-04-15T13:49:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2204","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2204","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2204.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2204.patch","merged_at":"2021-04-15T13:49:46Z"},"body":"Fixes #2148\r\n\r\nAdds options to use strict mode, different schemes of evaluation, sample weight and adjust zero_division behavior, if encountered.\r\n\r\n`seqeval` provides schemes as objects, hence dynamic import from string, to avoid making the user do the import (thanks to @albertvillanova for the `importlib` idea).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2204\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2204\/timeline","performed_via_github_app":null} 
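A hedged usage sketch for the configurable `seqeval` options added in the PR above (#2204); the exact keyword names (`scheme`, `mode`, `zero_division`) are inferred from the PR description ("strict mode, different schemes of evaluation, sample weight and adjust zero_division behavior") and may not match the merged code exactly.

```python
# Hedged sketch for #2204: keyword names are assumptions based on the PR text,
# not verified against the merged metric script.
from datasets import load_metric

metric = load_metric("seqeval")
predictions = [["O", "B-PER", "I-PER", "O"]]
references = [["O", "B-PER", "I-PER", "O"]]
results = metric.compute(
    predictions=predictions,
    references=references,
    scheme="IOB2",     # scheme given as a string and imported dynamically
    mode="strict",     # strict entity-level matching
    zero_division=0,   # value used when a division by zero would occur
)
print(results["overall_f1"])
```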
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2203","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2203\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2203\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2203\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2203","id":855053595,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyODg4MzA5","number":2203,"title":"updated banking77 train and test data","user":{"login":"hsali","id":6765330,"node_id":"MDQ6VXNlcjY3NjUzMzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6765330?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hsali","html_url":"https:\/\/github.com\/hsali","followers_url":"https:\/\/api.github.com\/users\/hsali\/followers","following_url":"https:\/\/api.github.com\/users\/hsali\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hsali\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hsali\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hsali\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hsali\/orgs","repos_url":"https:\/\/api.github.com\/users\/hsali\/repos","events_url":"https:\/\/api.github.com\/users\/hsali\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hsali\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-10T12:10:10Z","updated_at":"2021-04-23T14:33:39Z","closed_at":"2021-04-23T14:33:39Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2203","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2203","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2203.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2203.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2203\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2203\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2202","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2202\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2202\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2202\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2202","id":854501109,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyNDM2ODMx","number":2202,"title":"Add classes GenerateMode, DownloadConfig and Version to the 
documentation","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-09T12:58:19Z","updated_at":"2021-04-12T17:58:00Z","closed_at":"2021-04-12T17:57:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2202","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2202","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2202.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2202.patch","merged_at":"2021-04-12T17:57:59Z"},"body":"Add documentation for classes `GenerateMode`, `DownloadConfig` and `Version`.\r\n\r\nUpdate the docstring of `load_dataset` to create cross-reference links to the classes.\r\n\r\nRelated to #2187.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2202\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2202\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2201","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2201\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2201\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2201\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2201","id":854499563,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyNDM1NTE3","number":2201,"title":"Fix ArrowWriter overwriting features in 
ArrowBasedBuilder","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-09T12:56:19Z","updated_at":"2021-04-12T13:32:17Z","closed_at":"2021-04-12T13:32:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2201","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2201","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2201.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2201.patch","merged_at":"2021-04-12T13:32:16Z"},"body":"This should fix the issues with CSV loading experienced in #2153 and #2200.\r\n\r\nThe CSV builder is an ArrowBasedBuilder that had an issue with its ArrowWriter used to write the arrow file from the csv data.\r\nThe writer wasn't initialized with the features passed by the user. 
Therefore the writer was inferring the features from the arrow data, discarding the features passed by the user.\r\n\r\nI fixed that and I updated the tests","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2201\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2201\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2200","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2200\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2200\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2200\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2200","id":854449656,"node_id":"MDU6SXNzdWU4NTQ0NDk2NTY=","number":2200,"title":"_prepare_split will overwrite DatasetBuilder.info.features","user":{"login":"Gforky","id":4157614,"node_id":"MDQ6VXNlcjQxNTc2MTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4157614?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Gforky","html_url":"https:\/\/github.com\/Gforky","followers_url":"https:\/\/api.github.com\/users\/Gforky\/followers","following_url":"https:\/\/api.github.com\/users\/Gforky\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Gforky\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Gforky\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Gforky\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Gforky\/orgs","repos_url":"https:\/\/api.github.com\/users\/Gforky\/repos","events_url":"https:\/\/api.github.com\/users\/Gforky\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Gforky\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoe
stq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-04-09T11:47:13Z","updated_at":"2021-06-04T10:37:35Z","closed_at":"2021-06-04T10:37:35Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, here is my issue:\r\nI initialized a Csv datasetbuilder with specific features:\r\n```\r\ndef get_dataset_features(data_args):\r\n features = {}\r\n if data_args.text_features:\r\n features.update({text_feature: hf_features.Value(\"string\") for text_feature in data_args.text_features.strip().split(\",\")})\r\n if data_args.num_features:\r\n features.update({text_feature: hf_features.Value(\"float32\") for text_feature in data_args.num_features.strip().split(\",\")})\r\n if data_args.label_classes:\r\n features[\"label\"] = hf_features.ClassLabel(names=data_args.label_classes.strip().split(\",\"))\r\n else:\r\n features[\"label\"] = hf_features.Value(\"float32\")\r\n return hf_features.Features(features)\r\n\r\ndatasets = load_dataset(extension,\r\n data_files=data_files,\r\n sep=data_args.delimiter,\r\n header=data_args.header,\r\n column_names=data_args.column_names.split(\",\") if data_args.column_names else None,\r\n features=get_dataset_features(data_args=data_args))\r\n```\r\nThe `features` is printout as below before `builder_instance.as_dataset` is called:\r\n```\r\n{'label': ClassLabel(num_classes=2, names=['unacceptable', 'acceptable'], names_file=None, id=None), 'notated': Value(dtype='string', id=None), 'sentence': Value(dtype='string', id=None), 'src_code': Value(dtype='string', id=None)}\r\n````\r\n\r\nBut after the `builder_instance.as_dataset` is called for Csv dataset builder, the `features` is changed to:\r\n```\r\n{'label': Value(dtype='int64', id=None), 'notated': Value(dtype='string', id=None), 'sentence': Value(dtype='string', id=None), 'src_code': Value(dtype='string', id=None)}\r\n```\r\n\r\nAfter digged into the code, I releazed that in `ArrowBasedBuilder._prepare_split`, the DatasetBuilder's info's features will be overwrited by `ArrowWriter`'s `_features`. 
\r\nBut `ArrowWriter` is initailized without passing `features`.\r\nSo my concern is:\r\nIt's this overwrite must be done, or, should it be an option to pass features in `_prepare_split` function?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2200\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2200\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2199","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2199\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2199\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2199\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2199","id":854417318,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyMzY0ODU3","number":2199,"title":"Fix backward compatibility in Dataset.load_from_disk","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-09T11:01:10Z","updated_at":"2021-04-09T15:57:05Z","closed_at":"2021-04-09T15:57:05Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2199","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2199","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2199.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2199.patch","merged_at":"2021-04-09T15:57:05Z"},"body":"Fix backward compatibility when loading from disk an old dataset saved to disk with indices using key \"_indices_data_files\".\r\n\r\nRelated to #2195.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2199\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2199\/timeline","performed_via_github_app":null} 
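The CSV reports above (#2200, addressed by the ArrowWriter fix in #2201) boil down to the pattern sketched below; the file name and column set are illustrative assumptions, and the expected printout reflects the behavior after the fix.

```python
# Minimal sketch of the pattern discussed in #2200/#2201: explicit features
# passed to the CSV loader should be kept instead of being re-inferred from
# the Arrow data. File name and columns are illustrative assumptions.
from datasets import load_dataset, Features, Value, ClassLabel

features = Features({
    "sentence": Value("string"),
    "label": ClassLabel(names=["unacceptable", "acceptable"]),
})
ds = load_dataset("csv", data_files={"train": "train.csv"}, features=features)
# After the fix, this prints the ClassLabel above rather than Value('int64').
print(ds["train"].features["label"])
```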
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2198","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2198\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2198\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2198\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2198","id":854357481,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyMzE0MTIz","number":2198,"title":"added file_permission in load_dataset","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-09T09:39:06Z","updated_at":"2021-04-16T14:11:46Z","closed_at":"2021-04-16T14:11:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2198","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2198","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2198.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2198.patch","merged_at":null},"body":"As discussed in #2065 I've added `file_permission` argument in `load_dataset`. 
\r\n\r\nAdded mainly 2 things here:\r\n1) Permission of downloaded datasets when converted to .arrow files can be changed with argument `file_permission` argument in `load_dataset` (default is 0o644 only)\r\n2) Incase the user uses `map` later on to generate another cache file of dataset, it ensures the permissions of newly generated file are similar to that of` *-train.arrow` file inside cache_dir for that dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2198\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2198\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2197","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2197\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2197\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2197\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2197","id":854356559,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyMzEzMzQw","number":2197,"title":"fix missing indices_files in load_form_disk","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-09T09:37:57Z","updated_at":"2021-04-09T09:54:40Z","closed_at":"2021-04-09T09:54:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2197","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2197","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2197.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2197.patch","merged_at":"2021-04-09T09:54:39Z"},"body":"This should fix #2195\r\n\r\n`load_from_disk` was failing if there was no \"_indices_files\" field in state.json. 
This can happen if the dataset has no indices mapping","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2197\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2197\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2196","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2196\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2196\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2196\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2196","id":854126114,"node_id":"MDU6SXNzdWU4NTQxMjYxMTQ=","number":2196,"title":"`load_dataset` caches two arrow files?","user":{"login":"hwijeen","id":29157715,"node_id":"MDQ6VXNlcjI5MTU3NzE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29157715?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hwijeen","html_url":"https:\/\/github.com\/hwijeen","followers_url":"https:\/\/api.github.com\/users\/hwijeen\/followers","following_url":"https:\/\/api.github.com\/users\/hwijeen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hwijeen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hwijeen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hwijeen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hwijeen\/orgs","repos_url":"https:\/\/api.github.com\/users\/hwijeen\/repos","events_url":"https:\/\/api.github.com\/users\/hwijeen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hwijeen\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-09T03:49:19Z","updated_at":"2021-04-12T05:25:29Z","closed_at":"2021-04-12T05:25:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI am using datasets to load large json file of 587G.\r\nI checked the cached folder and found that there are two arrow files created:\r\n* `cache-ed205e500a7dc44c.arrow` - 355G\r\n* `json-train.arrow` - 582G\r\n\r\nWhy is the first file created?\r\nIf I delete it, would I still be able to `load_from_disk`?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2196\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2196\/timeline","performed_via_github_app":null} 
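Note: for the question in #2196 above, one way to see which Arrow files actually back a loaded dataset is its `cache_files` attribute; the `cache-*.arrow` files are, as far as I can tell, written by cached operations such as `map()`, separately from the prepared `json-train.arrow` file. A small sketch, with a hypothetical path standing in for the large JSON file described above:

```python
from datasets import load_dataset

# Hypothetical path standing in for the large JSON file from the issue.
ds = load_dataset("json", data_files="my_large_file.json", split="train")

# Each entry names an Arrow file backing the dataset, which helps tell the
# prepared json-train.arrow apart from cache-*.arrow files created by
# cached operations such as map().
for cache_file in ds.cache_files:
    print(cache_file["filename"])
```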
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2195","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2195\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2195\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2195\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2195","id":854070194,"node_id":"MDU6SXNzdWU4NTQwNzAxOTQ=","number":2195,"title":"KeyError: '_indices_files' in `arrow_dataset.py`","user":{"login":"samsontmr","id":15007950,"node_id":"MDQ6VXNlcjE1MDA3OTUw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15007950?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/samsontmr","html_url":"https:\/\/github.com\/samsontmr","followers_url":"https:\/\/api.github.com\/users\/samsontmr\/followers","following_url":"https:\/\/api.github.com\/users\/samsontmr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/samsontmr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/samsontmr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/samsontmr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/samsontmr\/orgs","repos_url":"https:\/\/api.github.com\/users\/samsontmr\/repos","events_url":"https:\/\/api.github.com\/users\/samsontmr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/samsontmr\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-09T01:37:12Z","updated_at":"2021-04-09T09:55:09Z","closed_at":"2021-04-09T09:54:39Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"After pulling the latest master, I'm getting a crash when `load_from_disk` tries to load my local dataset.\r\n\r\nTrace:\r\n```\r\nTraceback (most recent call last):\r\n File \"load_data.py\", line 11, in \r\n dataset = load_from_disk(SRC)\r\n File \"\/opt\/conda\/envs\/py38\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 784, in load_from_disk\r\n return DatasetDict.load_from_disk(dataset_path, fs, keep_in_memory=keep_in_memory)\r\n File \"\/opt\/conda\/envs\/py38\/lib\/python3.8\/site-packages\/datasets\/dataset_dict.py\", line 692, in load_from_disk\r\n dataset_dict[k] = Dataset.load_from_disk(dataset_dict_split_path, fs, keep_in_memory=keep_in_memory)\r\n File \"\/opt\/conda\/envs\/py38\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 634, in load_from_disk\r\n if state[\"_indices_files\"]:\r\nKeyError: '_indices_files'\r\n```\r\n\r\nI believe this is the line causing the error since there may not be a `_indices_files` key in the older versions:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/b70141e3c5149430951773aaa0155555c5fb3e76\/src\/datasets\/arrow_dataset.py#L634\r\n\r\nMay I suggest using `state.get()` instead of directly indexing the dictionary?\r\n\r\n@lhoestq 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2195\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2195\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2194","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2194\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2194\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2194\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2194","id":853909452,"node_id":"MDU6SXNzdWU4NTM5MDk0NTI=","number":2194,"title":"py3.7: TypeError: can't pickle _LazyModule objects","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-08T21:02:48Z","updated_at":"2021-04-09T16:56:50Z","closed_at":"2021-04-09T01:52:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"While this works fine with py3.8, under py3.7, with a totally new conda env and transformers install:\r\n\r\n```\r\ngit clone https:\/\/github.com\/huggingface\/transformers\r\ncd transformers\r\npip install -e .[testing]\r\n\r\nexport BS=1; rm -rf \/tmp\/test-clm; PYTHONPATH=src USE_TF=0 CUDA_VISIBLE_DEVICES=0 python \\\r\nexamples\/language-modeling\/run_clm.py --model_name_or_path distilgpt2 --dataset_name wikitext \\\r\n--dataset_config_name wikitext-2-raw-v1 --do_train --max_train_samples 1 \\\r\n--per_device_train_batch_size $BS --output_dir \/tmp\/test-clm --block_size 128 --logging_steps 1 \\\r\n--fp16\r\n```\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"examples\/language-modeling\/run_clm.py\", line 453, in \r\n main()\r\n File \"examples\/language-modeling\/run_clm.py\", line 336, in main\r\n load_from_cache_file=not data_args.overwrite_cache,\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py\", line 303, in map\r\n for k, dataset in self.items()\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py\", line 303, in \r\n for k, dataset in self.items()\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1259, in map\r\n update_data=update_data,\r\n File 
\"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 157, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 158, in wrapper\r\n self._fingerprint, transform, kwargs_for_fingerprint\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 105, in update_fingerprint\r\n hasher.update(transform_args[key])\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 57, in update\r\n self.m.update(self.hash(value).encode(\"utf-8\"))\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 53, in hash\r\n return cls.hash_default(value)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 46, in hash_default\r\n return cls.hash_bytes(dumps(value))\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 389, in dumps\r\n dump(obj, file)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 361, in dump\r\n Pickler(file, recurse=True).dump(obj)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 454, in dump\r\n StockPickler.dump(self, obj)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 437, in dump\r\n self.save(obj)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 556, in save_function\r\n obj=obj,\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 941, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 859, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 885, in _batch_setitems\r\n save(v)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 524, in save\r\n rv = reduce(self.proto)\r\nTypeError: can't pickle _LazyModule objects\r\n```\r\n```\r\n$ python --version\r\nPython 3.7.4\r\n\r\n$ python -m torch.utils.collect_env\r\nCollecting environment information...\r\nPyTorch version: 1.8.0.dev20210110+cu110\r\nIs debug build: False\r\nCUDA used to build PyTorch: 11.0\r\nROCM used to build PyTorch: N\/A\r\n\r\nOS: Ubuntu 20.04.2 LTS (x86_64)\r\nGCC version: (Ubuntu 9.3.0-17ubuntu1~20.04) 9.3.0\r\nClang version: 10.0.0-4ubuntu1 \r\nCMake version: version 
3.16.3\r\n```\r\n\r\nThanks.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2194\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2194\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2193","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2193\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2193\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2193\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2193","id":853725707,"node_id":"MDU6SXNzdWU4NTM3MjU3MDc=","number":2193,"title":"Filtering\/mapping on one column is very slow","user":{"login":"norabelrose","id":39116809,"node_id":"MDQ6VXNlcjM5MTE2ODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39116809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/norabelrose","html_url":"https:\/\/github.com\/norabelrose","followers_url":"https:\/\/api.github.com\/users\/norabelrose\/followers","following_url":"https:\/\/api.github.com\/users\/norabelrose\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/norabelrose\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/norabelrose\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/norabelrose\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/norabelrose\/orgs","repos_url":"https:\/\/api.github.com\/users\/norabelrose\/repos","events_url":"https:\/\/api.github.com\/users\/norabelrose\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/norabelrose\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2021-04-08T18:16:14Z","updated_at":"2021-04-26T16:13:59Z","closed_at":"2021-04-26T16:13:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm currently using the `wikipedia` dataset\u2014 I'm tokenizing the articles with the `tokenizers` library using `map()` and also adding a new `num_tokens` column to the dataset as part of that map operation.\r\n\r\nI want to be able to _filter_ the dataset based on this `num_tokens` column, but even when I specify `input_columns=['num_tokens']`, it seems that the entirety of each row is loaded into memory, which makes the operation take much longer than it should. Indeed, `filter` currently just calls `map`, and I found that in `_map_single` on lines 1690-1704 of `arrow_dataset.py`, the method is just grabbing slices of _all the rows_ of the dataset and then passing only the specified columns to the map function. It seems that, when the user passes a value for `input_columns`, the `map` function should create a temporary pyarrow table by selecting just those columns, and then get slices from that table. 
Or something like that\u2014 I'm not very familiar with the pyarrow API.\r\n\r\nI know that in the meantime I can sort of get around this by simply only returning the rows that match my filter criterion from the tokenizing function I pass to `map()`, but I actually _also_ want to map on just the `num_tokens` column in order to compute batches with a roughly uniform number of tokens per batch. I would also ideally like to be able to change my minimum and maximum article lengths without having to re-tokenize the entire dataset.\r\n\r\nPS: This is definitely not a \"dataset request.\" I'm realizing that I don't actually know how to remove labels from my own issues on other people's repos, if that is even possible.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2193\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2193\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2192","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2192\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2192\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2192\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2192","id":853547910,"node_id":"MDExOlB1bGxSZXF1ZXN0NjExNjE5NTY0","number":2192,"title":"Fix typo in huggingface hub","user":{"login":"LysandreJik","id":30755778,"node_id":"MDQ6VXNlcjMwNzU1Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30755778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LysandreJik","html_url":"https:\/\/github.com\/LysandreJik","followers_url":"https:\/\/api.github.com\/users\/LysandreJik\/followers","following_url":"https:\/\/api.github.com\/users\/LysandreJik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LysandreJik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LysandreJik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LysandreJik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LysandreJik\/orgs","repos_url":"https:\/\/api.github.com\/users\/LysandreJik\/repos","events_url":"https:\/\/api.github.com\/users\/LysandreJik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LysandreJik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-08T14:42:24Z","updated_at":"2021-04-08T15:47:41Z","closed_at":"2021-04-08T15:47:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2192","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2192","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2192.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2192.patch","merged_at":"2021-04-08T15:47:40Z"},"body":"pip knows how to resolve to `huggingface_hub`, but conda doesn't!\r\n\r\nThe `packaging` dependency is also required for the build to 
complete.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2192\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2192\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2191","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2191\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2191\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2191\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2191","id":853364204,"node_id":"MDExOlB1bGxSZXF1ZXN0NjExNDY1Nzc0","number":2191,"title":"Refactorize tests to use Dataset as context manager","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/1","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1\/labels","id":6644198,"node_id":"MDk6TWlsZXN0b25lNjY0NDE5OA==","number":1,"title":"1.6","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":4,"state":"closed","created_at":"2021-04-09T13:07:51Z","updated_at":"2021-04-20T16:50:46Z","due_on":"2021-04-16T07:00:00Z","closed_at":"2021-04-20T16:50:46Z"},"comments":4,"created_at":"2021-04-08T11:21:04Z","updated_at":"2021-04-19T07:53:11Z","closed_at":"2021-04-19T07:53:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2191","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2191","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2191.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2191.patch","merged_at":"2021-04-19T07:53:10Z"},"body":"Refactorize Dataset tests to use Dataset as context manager.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2191\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2191\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2190","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2190\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2190\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2190\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2190","id":853181564,"node_id":"MDU6SXNzdWU4NTMxODE1NjQ=","number":2190,"title":"News_commentary Dataset Translation Pairs are of Incorrect Language Specified 
Pairs","user":{"login":"anassalamah","id":8571003,"node_id":"MDQ6VXNlcjg1NzEwMDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8571003?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anassalamah","html_url":"https:\/\/github.com\/anassalamah","followers_url":"https:\/\/api.github.com\/users\/anassalamah\/followers","following_url":"https:\/\/api.github.com\/users\/anassalamah\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anassalamah\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anassalamah\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anassalamah\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anassalamah\/orgs","repos_url":"https:\/\/api.github.com\/users\/anassalamah\/repos","events_url":"https:\/\/api.github.com\/users\/anassalamah\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anassalamah\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-08T07:53:43Z","updated_at":"2021-05-24T10:03:55Z","closed_at":"2021-05-24T10:03:55Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I used load_dataset to load the news_commentary dataset for \"ar-en\" translation pairs but found translations from Arabic to Hindi. \r\n\r\n```\r\ntrain_ds = load_dataset(\"news_commentary\", \"ar-en\", split='train[:98%]')\r\nval_ds = load_dataset(\"news_commentary\", \"ar-en\", split='train[98%:]')\r\n\r\n# filtering out examples that are not ar-en translations but ar-hi\r\nval_ds = val_ds.filter(lambda example, indice: indice not in chain(range(1312,1327) ,range(1384,1399), range(1030,1042)), with_indices=True)\r\n```\r\n\r\n* I'm fairly new to using datasets so I might be doing something wrong","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2190\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2190\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2189","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2189\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2189\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2189\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2189","id":853052891,"node_id":"MDU6SXNzdWU4NTMwNTI4OTE=","number":2189,"title":"save_to_disk doesn't work when we use concatenate_datasets function before creating the final 
dataset_object.","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-08T04:42:53Z","updated_at":"2021-04-14T13:57:05Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As you can see, it saves the entire dataset.\r\n\r\n@lhoestq \r\n\r\nYou can check by going through the following example,\r\n\r\n```\r\nfrom datasets import load_from_disk,concatenate_datasets\r\n\r\nloaded_data=load_from_disk('\/home\/gsir059\/HNSW-ori\/my_knowledge_dataset')\r\nn=20\r\nkb_list=[loaded_data.shard(n, i, contiguous=True) for i in range(n)]\r\nfinal_dataset=concatenate_datasets([kb_list[1],kb_list[2]])\r\nfinal_dataset.save_to_disk('\/home\/gsir059\/haha\/k.arrow')\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2189\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2189\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2188","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2188\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2188\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2188\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2188","id":853044166,"node_id":"MDU6SXNzdWU4NTMwNDQxNjY=","number":2188,"title":"Duplicate data in Timit 
dataset","user":{"login":"BHM-RB","id":78190188,"node_id":"MDQ6VXNlcjc4MTkwMTg4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/78190188?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BHM-RB","html_url":"https:\/\/github.com\/BHM-RB","followers_url":"https:\/\/api.github.com\/users\/BHM-RB\/followers","following_url":"https:\/\/api.github.com\/users\/BHM-RB\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BHM-RB\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BHM-RB\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BHM-RB\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BHM-RB\/orgs","repos_url":"https:\/\/api.github.com\/users\/BHM-RB\/repos","events_url":"https:\/\/api.github.com\/users\/BHM-RB\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BHM-RB\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-08T04:21:54Z","updated_at":"2021-04-08T12:13:19Z","closed_at":"2021-04-08T12:13:19Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I ran a simple code to list all texts in Timit dataset and the texts were all the same.\r\nIs this dataset corrupted?\r\n**Code:**\r\ntimit = load_dataset(\"timit_asr\")\r\nprint(*timit['train']['text'], sep='\\n')\r\n**Result:**\r\nWould such an act of refusal be useful?\r\nWould such an act of refusal be useful?\r\nWould such an act of refusal be useful?\r\nWould such an act of refusal be useful?\r\n...\r\n...\r\nWould such an act of refusal be useful?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2188\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2188\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2187","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2187\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2187\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2187\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2187","id":852939736,"node_id":"MDU6SXNzdWU4NTI5Mzk3MzY=","number":2187,"title":"Question (potential issue?) 
related to datasets caching","user":{"login":"ioana-blue","id":17202292,"node_id":"MDQ6VXNlcjE3MjAyMjky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17202292?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ioana-blue","html_url":"https:\/\/github.com\/ioana-blue","followers_url":"https:\/\/api.github.com\/users\/ioana-blue\/followers","following_url":"https:\/\/api.github.com\/users\/ioana-blue\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ioana-blue\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ioana-blue\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ioana-blue\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ioana-blue\/orgs","repos_url":"https:\/\/api.github.com\/users\/ioana-blue\/repos","events_url":"https:\/\/api.github.com\/users\/ioana-blue\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ioana-blue\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2021-04-08T00:16:28Z","updated_at":"2021-04-14T14:55:58Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I thought I had disabled datasets caching in my code, as follows:\r\n```\r\nfrom datasets import set_caching_enabled\r\n...\r\ndef main():\r\n\r\n # disable caching in datasets\r\n set_caching_enabled(False)\r\n```\r\nHowever, in my log files I see messages like the following:\r\n\r\n```\r\n04\/07\/2021 18:34:42 - WARNING - datasets.builder - Using custom data configuration default-888a87931cbc5877\r\n04\/07\/2021 18:34:42 - WARNING - datasets.builder - Reusing dataset csv (xxxx\/cache-transformers\/datasets\/csv\/default-888a87931cbc5877\/0.0.0\/965b6429be0fc05f975b608ce64e1fa941cc8fb4f30629b523d2390f3c0e1a93\r\n```\r\nCan you please let me know what this reusing dataset csv means? I wouldn't expect any reusing with the datasets caching disabled. 
Thank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2187\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2187\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2186","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2186\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2186\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2186\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2186","id":852840819,"node_id":"MDExOlB1bGxSZXF1ZXN0NjExMDMxNzE0","number":2186,"title":"GEM: new challenge sets","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-07T21:39:07Z","updated_at":"2021-04-07T21:56:35Z","closed_at":"2021-04-07T21:56:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2186","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2186","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2186.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2186.patch","merged_at":"2021-04-07T21:56:35Z"},"body":"This PR updates the GEM dataset to:\r\n- remove extraneous fields in WikiAuto after https:\/\/github.com\/huggingface\/datasets\/pull\/2171 fixed the source\r\n- add context and services to Schema Guided Dialog\r\n- Add new or update challenge sets for MLSUM ES and DE, XSUM, and SGD","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2186\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2186\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2185","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2185\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2185\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2185\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2185","id":852684395,"node_id":"MDU6SXNzdWU4NTI2ODQzOTU=","number":2185,"title":".map() and distributed training","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-04-07T18:22:14Z","updated_at":"2021-10-23T07:11:15Z","closed_at":"2021-04-09T15:38:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI have a question regarding distributed training and the `.map` call on a dataset.\r\n\r\nI have a local dataset \"my_custom_dataset\" that I am loading with `datasets = load_from_disk(dataset_path=my_path)`.\r\n`dataset` is then tokenized:\r\n```python\r\ndatasets = load_from_disk(dataset_path=my_path)\r\n\r\n[...]\r\n\r\ndef tokenize_function(examples):\r\n return tokenizer(examples[text_column_name])\r\n\r\nlogger.info(\"Mapping dataset to tokenized dataset.\")\r\ntokenized_datasets = datasets.map(\r\n tokenize_function,\r\n batched=True,\r\n num_proc=preprocessing_num_workers,\r\n remove_columns=column_names,\r\n load_from_cache_file=True,\r\n)\r\n```\r\nI am using 31 workers (`preprocessing_num_workers=31`) and thus it creates 31 `cache*.arrow` files in `my_path\/train` (there is only a train split).\r\nWhen I relaunch the script, the map is tokenization is skipped in favor of loading the 31 previously cached files, and that's perfect.\r\n\r\nEverything so far was done by launching a **single process script**.\r\nI now launch the same training script in **distributed mode** (`pytorch -m torch.distributed.launch --nproc_per_node 2`). However, once it reaches the map call, it re-does the tokenization... instead of loading the 31 cached files. 
\r\n\r\nI tried adding the `cache_file_name` argument: `cache_file_name={\"train\": my_path\/one_of_the_arrow_file}`, but I can't give the 31 cached files, so it probably isn't the right way to do it.\r\n\r\n**My question: what is the best way to load cached files if they were pre-processed and dumped in multiple arrow files?** It seems automatically handled for single processes but fails on distributed training.\r\n\r\n- I am following the same structure as the examples of transformers (more specifically [run_clm.py](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/language-modeling\/run_clm.py) in my case)\r\n- I am using 1.5.0 version of datasets if that matters.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2185\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2185\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2184","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2184\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2184\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2184\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2184","id":852597258,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwODIxMTc0","number":2184,"title":"Implementation of class_encode_column","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-07T16:47:43Z","updated_at":"2021-04-16T11:44:37Z","closed_at":"2021-04-16T11:26:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2184","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2184","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2184.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2184.patch","merged_at":"2021-04-16T11:26:59Z"},"body":"Addresses #2176 \r\n\r\nI'm happy to discuss the API and 
internals!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2184\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2184\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2183","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2183\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2183\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2183\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2183","id":852518411,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwNzU3MjUz","number":2183,"title":"Fix s3fs tests for py36 and py37+","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-07T15:17:11Z","updated_at":"2021-04-08T08:54:45Z","closed_at":"2021-04-08T08:54:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2183","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2183","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2183.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2183.patch","merged_at":"2021-04-08T08:54:44Z"},"body":"Recently several changes happened:\r\n1. latest versions of `fsspec` require python>3.7 for async features\r\n2. 
`s3fs` added a dependency on `aiobotocore`, which is not compatible with the `moto` s3 mock context manager\r\n\r\nThis PR fixes both issues, by pinning `fsspec` and `s3fs` for python 3.6, and by using `moto` in server mode to support running the tests on python>=3.7 with the latest version of `fsspec` and `s3fs`.\r\n\r\ncc @philschmid ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2183\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2183\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2182","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2182\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2182\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2182\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2182","id":852384872,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwNjQ2MDIy","number":2182,"title":"Set default in-memory value depending on the dataset size","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/1","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1\/labels","id":6644198,"node_id":"MDk6TWlsZXN0b25lNjY0NDE5OA==","number":1,"title":"1.6","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":4,"state":"closed","created_at":"2021-04-09T13:07:51Z","updated_at":"2021-04-20T16:50:46Z","due_on":"2021-04-16T07:00:00Z","closed_at":"2021-04-20T16:50:46Z"},"comments":4,"created_at":"2021-04-07T13:00:18Z","updated_at":"2021-04-20T14:20:12Z","closed_at":"2021-04-20T10:04:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2182","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2182","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2182.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2182.patch","merged_at":"2021-04-20T10:04:03Z"},"body":"Set a default value for `in_memory` depending on the size of the dataset to be loaded.\r\n\r\nClose #2179.\r\n\r\nTODO:\r\n- [x] Add a section in the docs about this.\r\n- ~Add a warning if someone tries to specify `cache_file_name=` in `map`, `filter` etc. 
on a dataset that is in memory, since the computation is not going to be cached in this case.~","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2182\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2182\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2181","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2181\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2181\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2181\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2181","id":852261607,"node_id":"MDU6SXNzdWU4NTIyNjE2MDc=","number":2181,"title":"Error when loading a HUGE json file (pyarrow.lib.ArrowInvalid: straddling object straddles two block boundaries)","user":{"login":"hwijeen","id":29157715,"node_id":"MDQ6VXNlcjI5MTU3NzE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29157715?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hwijeen","html_url":"https:\/\/github.com\/hwijeen","followers_url":"https:\/\/api.github.com\/users\/hwijeen\/followers","following_url":"https:\/\/api.github.com\/users\/hwijeen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hwijeen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hwijeen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hwijeen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hwijeen\/orgs","repos_url":"https:\/\/api.github.com\/users\/hwijeen\/repos","events_url":"https:\/\/api.github.com\/users\/hwijeen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hwijeen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-04-07T10:26:46Z","updated_at":"2021-04-12T07:15:55Z","closed_at":"2021-04-12T07:15:55Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, thanks for the great library. 
I have used the brilliant library for a couple of small projects, and now using it for a fairly big project.\r\nWhen loading a huge json file of 500GB, pyarrow complains as follows:\r\n```\r\nTraceback (most recent call last):\r\n File \"\/home\/user\/.pyenv\/versions\/3.7.9\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 531, in incomplete_dir\r\n yield tmp_dir\r\n File \"\/home\/user\/.pyenv\/versions\/3.7.9\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 573, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/user\/.pyenv\/versions\/3.7.9\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 650, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/user\/.pyenv\/versions\/3.7.9\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 1027, in _prepare_split\r\n for key, table in utils.tqdm(generator, unit=\" tables\", leave=False, disable=not_verbose):\r\n File \"\/home\/user\/.pyenv\/versions\/3.7.9\/lib\/python3.7\/site-packages\/tqdm\/std.py\", line 1133, in __iter__\r\n for obj in iterable:\r\n File \"\/app\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/json\/9498524fd296a6cca99c66d6c5be507d1c0991f5a814e535b507f4a66096a641\/json.py\", line 83, in _generate_tables\r\n parse_options=self.config.pa_parse_options,\r\n File \"pyarrow\/_json.pyx\", line 247, in pyarrow._json.read_json\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: straddling object straddles two block boundaries (try to increase block size?)\r\n```\r\nWhen using only a small portion of the sample file, say first 100 lines, it works perfectly well..\r\n\r\nI see that it is the error from pyarrow, but could you give me a hint or possible solutions?\r\n#369 describes the same error and #372 claims to have fixed the issue, but I have no clue why I am still getting this one. 
Thanks in advance!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2181\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2181\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2180","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2180\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2180\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2180\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2180","id":852258635,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwNTQxOTA2","number":2180,"title":"Add tel to xtreme tatoeba","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-07T10:23:15Z","updated_at":"2021-04-07T15:50:35Z","closed_at":"2021-04-07T15:50:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2180","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2180","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2180.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2180.patch","merged_at":"2021-04-07T15:50:34Z"},"body":"This should fix issue #2149 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2180\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2180\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2179","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2179\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2179\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2179\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2179","id":852237957,"node_id":"MDU6SXNzdWU4NTIyMzc5NTc=","number":2179,"title":"Load small datasets in-memory instead of using 
memory map","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.
com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-04-07T09:58:16Z","updated_at":"2021-04-20T10:04:04Z","closed_at":"2021-04-20T10:04:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently all datasets are loaded using memory mapping by default in `load_dataset`.\r\nHowever this might not be necessary for small datasets. If a dataset is small enough, then it can be loaded in-memory and:\r\n- its memory footprint would be small so it's ok\r\n- in-memory computations\/queries would be faster\r\n- the caching on-disk would be disabled, making computations even faster (no I\/O bound because of the disk)\r\n- but running the same computation a second time would recompute everything since there would be no cached results on-disk. But this is probably fine since computations would be fast anyway + users should be able to provide a cache filename if needed.\r\n\r\nTherefore, maybe the default behavior of `load_dataset` should be to load small datasets in-memory and big datasets using memory mapping.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2179\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2179\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2178","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2178\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2178\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2178\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2178","id":852215058,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwNTA1Mjg1","number":2178,"title":"Fix cast memory usage by using map on subtables","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/1","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1\/labels","id":6644198,"node_id":"MDk6TWlsZXN0b25lNjY0NDE5OA==","number":1,"title":"1.6","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":4,"state":"closed","created_at":"2021-04-09T13:07:51Z","updated_at":"2021-04-20T16:50:46Z","due_on":"2021-04-16T07:00:00Z","closed_at":"2021-04-20T16:50:46Z"},"comments":3,"created_at":"2021-04-07T09:30:50Z","updated_at":"2021-04-20T14:20:44Z","closed_at":"2021-04-13T09:28:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2178","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2178","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2178.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2178.patch","merged_at":"2021-04-13T09:28:16Z"},"body":"The `cast` operation on a pyarrow Table may create new arrays in memory.\r\nThis is an issue since users expect memory mapped datasets to not fill up the RAM.\r\n\r\nTo fix that I used `map` to write a new arrow file on disk when cast is used.\r\nTo make things more convenient I introduced the `arrow` formatting of a dataset, to make it return pyarrow tables instead of python dicts. 
This way one can use pyarrow transforms directly when using `map`.\r\n\r\nedit: we'll use the same mechanism for `filter`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2178\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2178\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2177","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2177\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2177\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2177\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2177","id":852065307,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwMzc5MDYx","number":2177,"title":"add social thumbnial","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-07T06:40:06Z","updated_at":"2021-04-07T08:16:01Z","closed_at":"2021-04-07T08:16:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2177","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2177","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2177.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2177.patch","merged_at":"2021-04-07T08:16:01Z"},"body":"# What does this PR do?\r\n\r\nI added OpenGraph\/ Twitter Card support to the docs to create nice social thumbnails.\r\n\r\n![Bildschirmfoto 2021-04-07 um 08 36 50](https:\/\/user-images.githubusercontent.com\/32632186\/113821698-bac2ce80-977c-11eb-81aa-d8f16355857e.png)\r\n\r\nTo be able to add these I needed to install `sphinxext-opengraph`. I came across this [issue](https:\/\/github.com\/readthedocs\/readthedocs.org\/issues\/1758) on the readthedocs repo saying that since someone has built this plugin they are not integrating and providing documentation to it. That's why I added it for creating the documentation. The repository can be found [here](https:\/\/github.com\/wpilibsuite\/sphinxext-opengraph\/tree\/main).\r\n\r\nP.S. It seemed that `make style` never ran for `docs\/` i hope the changes are okay otherwise I'll revert it. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2177\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2177\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2176","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2176\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2176\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2176\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2176","id":851865795,"node_id":"MDU6SXNzdWU4NTE4NjU3OTU=","number":2176,"title":"Converting a Value to a ClassLabel","user":{"login":"nelson-liu","id":7272031,"node_id":"MDQ6VXNlcjcyNzIwMzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7272031?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nelson-liu","html_url":"https:\/\/github.com\/nelson-liu","followers_url":"https:\/\/api.github.com\/users\/nelson-liu\/followers","following_url":"https:\/\/api.github.com\/users\/nelson-liu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nelson-liu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nelson-liu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nelson-liu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nelson-liu\/orgs","repos_url":"https:\/\/api.github.com\/users\/nelson-liu\/repos","events_url":"https:\/\/api.github.com\/users\/nelson-liu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nelson-liu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-06T22:54:16Z","updated_at":"2021-04-19T10:10:34Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi!\r\n\r\nIn the docs for `cast`, it's noted that `For non-trivial conversion, e.g. string <-> ClassLabel you should use map() to update the Dataset.`\r\n\r\nWould it be possible to have an example that demonstrates such a string <-> ClassLabel conversion using `map`? 
Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2176\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2176\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2175","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2175\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2175\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2175\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2175","id":851836096,"node_id":"MDU6SXNzdWU4NTE4MzYwOTY=","number":2175,"title":"dataset.search_batch() function outputs all -1 indices sometime.","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-04-06T21:50:49Z","updated_at":"2021-04-16T12:21:16Z","closed_at":"2021-04-16T12:21:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am working with RAG and playing around with different faiss indexes. At the moment I use **index = faiss.index_factory(768, \"IVF65536_HNSW32,Flat\")**.\r\n\r\nDuring the retrieval phase exactly in [this line of retrieval_rag.py](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/src\/transformers\/models\/rag\/retrieval_rag.py#L231) an error issue when all retrieved indices are -1. Please refer to the screenshot of a PID worker. \r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/16892570\/113782387-37a67600-9786-11eb-9c29-acad661a9648.png)\r\n\r\n\r\nHere, my retrieve batch size is 2 and n_docs is 5. I can solve this by working around np. stack, but I want to ask, why we get an output index of -1. 
Do you have any idea :) ?\r\n\r\nIs this a problem of the index, where the faiss can't find any similar vector?\r\nIs there documentation on the output index being -1?\r\n\r\n@lhoestq \r\n ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2175\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2175\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2174","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2174\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2174\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2174\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2174","id":851383675,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA5ODE2OTQ2","number":2174,"title":"Pin docutils for better doc","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-06T12:40:20Z","updated_at":"2021-04-06T12:55:53Z","closed_at":"2021-04-06T12:55:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2174","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2174","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2174.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2174.patch","merged_at":"2021-04-06T12:55:53Z"},"body":"The latest release of docutils make the navbar in the documentation weird and the Markdown wrongly interpreted:\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/35901082\/113711773-5be55280-96b3-11eb-9b3b-9794f17709aa.png)\r\n\r\nWe had the same problem in Transformers and solved it by pinning docutils (a dep of sphinx).\r\n\r\nYou can see the version after the change [here](https:\/\/32769-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/index.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2174\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2174\/timeline","performed_via_github_app":null} 
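Editor's note on issue #2181 above (the `pyarrow.lib.ArrowInvalid: straddling object straddles two block boundaries` error on a huge JSON file): the hint in the error message is about pyarrow's JSON read block size, which each record must fit inside. The sketch below is a minimal, hedged illustration of raising that block size with the plain pyarrow API; the file path is hypothetical and this is not the `datasets` JSON loader itself.

```python
import pyarrow.json as paj

# Each JSON record has to fit inside one read block, so files with very long
# lines may need a block_size well above the default.
read_options = paj.ReadOptions(block_size=64 << 20)  # 64 MiB blocks (assumed size)
table = paj.read_json("data/huge_file.jsonl", read_options=read_options)
print(table.num_rows)
```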
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2173","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2173\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2173\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2173\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2173","id":851359284,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA5Nzk2NzI2","number":2173,"title":"Add OpenSLR dataset","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-06T12:08:34Z","updated_at":"2021-04-12T16:54:46Z","closed_at":"2021-04-12T16:54:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2173","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2173","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2173.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2173.patch","merged_at":"2021-04-12T16:54:45Z"},"body":"OpenSLR (https:\/\/openslr.org\/) is a site devoted to hosting speech and language resources, such as training corpora for speech recognition, and software related to speech recognition. There are around 80 speech datasets listed in OpenSLR, currently this PR includes only 9 speech datasets SLR41, SLR42, SLR43, SLR44, SLR63, SLR64, SLR65, SLR66 and SLR69 (Javanese, Khmer, Nepali and Sundanese, Malayalam, Marathi, Tamil, Telugu and Catalan). 
I can add other speech datasets gradually next time.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2173\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2173\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2172","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2172\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2172\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2172\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2172","id":851229399,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA5Njg4ODgx","number":2172,"title":"Pin fsspec lower than 0.9.0","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-06T09:19:09Z","updated_at":"2021-04-06T09:49:27Z","closed_at":"2021-04-06T09:49:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2172","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2172","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2172.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2172.patch","merged_at":"2021-04-06T09:49:26Z"},"body":"Today's release of `fsspec` 0.9.0 implied a new release of `s3fs` 0.6.0 but this version breaks the CI (see [here](https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/5312\/workflows\/490f3240-cd1c-4dd1-bb60-b416771c5584\/jobs\/32734) for example)\r\n\r\nI'm pinning `fsspec` until this has been resolved","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2172\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2172\/timeline","performed_via_github_app":null} 
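Editor's note on PR #2172 above (pinning `fsspec` below 0.9.0 because the matching `s3fs` 0.6.0 release broke CI): the snippet below is only a generic illustration of that kind of upper-bound pin in a `setup.py`, under an assumed placeholder project; it is not the actual diff from the PR, and the extras layout is an assumption.

```python
from setuptools import setup

setup(
    name="example-project",      # placeholder project, not the real datasets setup.py
    version="0.0.1",
    install_requires=[
        "fsspec<0.9.0",          # stay below the release that pulls in s3fs 0.6.0
    ],
    extras_require={
        "s3": ["boto3", "s3fs"],  # optional S3 support
    },
)
```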
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2171","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2171\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2171\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2171\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2171","id":851090662,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA5NTY4MDcw","number":2171,"title":"Fixed the link to wikiauto training data.","user":{"login":"mounicam","id":11708999,"node_id":"MDQ6VXNlcjExNzA4OTk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11708999?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mounicam","html_url":"https:\/\/github.com\/mounicam","followers_url":"https:\/\/api.github.com\/users\/mounicam\/followers","following_url":"https:\/\/api.github.com\/users\/mounicam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mounicam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mounicam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mounicam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mounicam\/orgs","repos_url":"https:\/\/api.github.com\/users\/mounicam\/repos","events_url":"https:\/\/api.github.com\/users\/mounicam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mounicam\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-06T07:13:11Z","updated_at":"2021-04-06T16:05:42Z","closed_at":"2021-04-06T16:05:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2171","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2171","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2171.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2171.patch","merged_at":"2021-04-06T16:05:09Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2171\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2171\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2170","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2170\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2170\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2170\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2170","id":850913228,"node_id":"MDU6SXNzdWU4NTA5MTMyMjg=","number":2170,"title":"Wikipedia historic dumps are deleted but hf\/datasets hardcodes dump 
date","user":{"login":"leezu","id":946903,"node_id":"MDQ6VXNlcjk0NjkwMw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/946903?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/leezu","html_url":"https:\/\/github.com\/leezu","followers_url":"https:\/\/api.github.com\/users\/leezu\/followers","following_url":"https:\/\/api.github.com\/users\/leezu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/leezu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/leezu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/leezu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/leezu\/orgs","repos_url":"https:\/\/api.github.com\/users\/leezu\/repos","events_url":"https:\/\/api.github.com\/users\/leezu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/leezu\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-06T03:13:18Z","updated_at":"2021-06-16T01:10:50Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Wikimedia does not keep all historical dumps. For example, as of today https:\/\/dumps.wikimedia.org\/kowiki\/ only provides\r\n\r\n```\r\n20201220\/ 02-Feb-2021 01:36 -\r\n20210101\/ 21-Feb-2021 01:26 -\r\n20210120\/ 02-Mar-2021 01:25 -\r\n20210201\/ 21-Mar-2021 01:26 -\r\n20210220\/ 02-Apr-2021 01:26 -\r\n20210301\/ 03-Mar-2021 08:10 -\r\n20210320\/ 21-Mar-2021 18:13 -\r\n20210401\/ 03-Apr-2021 10:08 -\r\nlatest\/ 03-Apr-2021 10:08 -\r\n```\r\n\r\nHowever, the wikipedia dataset provided in the library, only supports the following configs, none of which are applicable anymore when disregarding the cached datasets:\r\n\r\n```\r\nValueError: BuilderConfig 20210401.ko not found. 
Available: ['20200501.aa', '20200501.ab', '20200501.ace', '20200501.ady', '20200501.af', '20200501.ak', '20200501.als', '20200501.am', '20200501.an', '20200501.ang', '20200501.ar', '20200501.arc', '20200501.arz', '20200501.as', '20200501.ast', '20200501.atj', '20200501.av', '20200501.ay', '20200501.az', '20200501.azb', '20200501.ba', '20200501.bar', '20200501.bat-smg', '20200501.bcl', '20200501.be', '20200501.be-x-old', '20200501.bg', '20200501.bh', '20200501.bi', '20200501.bjn', '20200501.bm', '20200501.bn', '20200501.bo', '20200501.bpy', '20200501.br', '20200501.bs', '20200501.bug', '20200501.bxr', '20200501.ca', '20200501.cbk-zam', '20200501.cdo', '20200501.ce', '20200501.ceb', '20200501.ch', '20200501.cho', '20200501.chr', '20200501.chy', '20200501.ckb', '20200501.co', '20200501.cr', '20200501.crh', '20200501.cs', '20200501.csb', '20200501.cu', '20200501.cv', '20200501.cy', '20200501.da', '20200501.de', '20200501.din', '20200501.diq', '20200501.dsb', '20200501.dty', '20200501.dv', '20200501.dz', '20200501.ee', '20200501.el', '20200501.eml', '20200501.en', '20200501.eo', '20200501.es', '20200501.et', '20200501.eu', '20200501.ext', '20200501.fa', '20200501.ff', '20200501.fi', '20200501.fiu-vro', '20200501.fj', '20200501.fo', '20200501.fr', '20200501.frp', '20200501.frr', '20200501.fur', '20200501.fy', '20200501.ga', '20200501.gag', '20200501.gan', '20200501.gd', '20200501.gl', '20200501.glk', '20200501.gn', '20200501.gom', '20200501.gor', '20200501.got', '20200501.gu', '20200501.gv', '20200501.ha', '20200501.hak', '20200501.haw', '20200501.he', '20200501.hi', '20200501.hif', '20200501.ho', '20200501.hr', '20200501.hsb', '20200501.ht', '20200501.hu', '20200501.hy', '20200501.ia', '20200501.id', '20200501.ie', '20200501.ig', '20200501.ii', '20200501.ik', '20200501.ilo', '20200501.inh', '20200501.io', '20200501.is', '20200501.it', '20200501.iu', '20200501.ja', '20200501.jam', '20200501.jbo', '20200501.jv', '20200501.ka', '20200501.kaa', '20200501.kab', '20200501.kbd', '20200501.kbp', '20200501.kg', '20200501.ki', '20200501.kj', '20200501.kk', '20200501.kl', '20200501.km', '20200501.kn', '20200501.ko', '20200501.koi', '20200501.krc', '20200501.ks', '20200501.ksh', '20200501.ku', '20200501.kv', '20200501.kw', '20200501.ky', '20200501.la', '20200501.lad', '20200501.lb', '20200501.lbe', '20200501.lez', '20200501.lfn', '20200501.lg', '20200501.li', '20200501.lij', '20200501.lmo', '20200501.ln', '20200501.lo', '20200501.lrc', '20200501.lt', '20200501.ltg', '20200501.lv', '20200501.mai', '20200501.map-bms', '20200501.mdf', '20200501.mg', '20200501.mh', '20200501.mhr', '20200501.mi', '20200501.min', '20200501.mk', '20200501.ml', '20200501.mn', '20200501.mr', '20200501.mrj', '20200501.ms', '20200501.mt', '20200501.mus', '20200501.mwl', '20200501.my', '20200501.myv', '20200501.mzn', '20200501.na', '20200501.nah', '20200501.nap', '20200501.nds', '20200501.nds-nl', '20200501.ne', '20200501.new', '20200501.ng', '20200501.nl', '20200501.nn', '20200501.no', '20200501.nov', '20200501.nrm', '20200501.nso', '20200501.nv', '20200501.ny', '20200501.oc', '20200501.olo', '20200501.om', '20200501.or', '20200501.os', '20200501.pa', '20200501.pag', '20200501.pam', '20200501.pap', '20200501.pcd', '20200501.pdc', '20200501.pfl', '20200501.pi', '20200501.pih', '20200501.pl', '20200501.pms', '20200501.pnb', '20200501.pnt', '20200501.ps', '20200501.pt', '20200501.qu', '20200501.rm', '20200501.rmy', '20200501.rn', '20200501.ro', '20200501.roa-rup', '20200501.roa-tara', '20200501.ru', '20200501.rue', '20200501.rw', 
'20200501.sa', '20200501.sah', '20200501.sat', '20200501.sc', '20200501.scn', '20200501.sco', '20200501.sd', '20200501.se', '20200501.sg', '20200501.sh', '20200501.si', '20200501.simple', '20200501.sk', '20200501.sl', '20200501.sm', '20200501.sn', '20200501.so', '20200501.sq', '20200501.sr', '20200501.srn', '20200501.ss', '20200501.st', '20200501.stq', '20200501.su', '20200501.sv', '20200501.sw', '20200501.szl', '20200501.ta', '20200501.tcy', '20200501.te', '20200501.tet', '20200501.tg', '20200501.th', '20200501.ti', '20200501.tk', '20200501.tl', '20200501.tn', '20200501.to', '20200501.tpi', '20200501.tr', '20200501.ts', '20200501.tt', '20200501.tum', '20200501.tw', '20200501.ty', '20200501.tyv', '20200501.udm', '20200501.ug', '20200501.uk', '20200501.ur', '20200501.uz', '20200501.ve', '20200501.vec', '20200501.vep', '20200501.vi', '20200501.vls', '20200501.vo', '20200501.wa', '20200501.war', '20200501.wo', '20200501.wuu', '20200501.xal', '20200501.xh', '20200501.xmf', '20200501.yi', '20200501.yo', '20200501.za', '20200501.zea', '20200501.zh', '20200501.zh-classical', '20200501.zh-min-nan', '20200501.zh-yue', '20200501.zu']\r\n```\r\n\r\nThe cached datasets:\r\n\r\n```\r\n% aws s3 --no-sign-request --endpoint-url https:\/\/storage.googleapis.com ls s3:\/\/huggingface-nlp\/cache\/datasets\/wikipedia\/\r\n PRE 20200501.de\/\r\n PRE 20200501.en\/\r\n PRE 20200501.fr\/\r\n PRE 20200501.frr\/\r\n PRE 20200501.it\/\r\n PRE 20200501.simple\/\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2170\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2170\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2169","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2169\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2169\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2169\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2169","id":850456180,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA5MDI2ODUz","number":2169,"title":"Updated WER metric implementation to avoid memory 
issues","user":{"login":"diego-fustes","id":5707233,"node_id":"MDQ6VXNlcjU3MDcyMzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5707233?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/diego-fustes","html_url":"https:\/\/github.com\/diego-fustes","followers_url":"https:\/\/api.github.com\/users\/diego-fustes\/followers","following_url":"https:\/\/api.github.com\/users\/diego-fustes\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/diego-fustes\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/diego-fustes\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/diego-fustes\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/diego-fustes\/orgs","repos_url":"https:\/\/api.github.com\/users\/diego-fustes\/repos","events_url":"https:\/\/api.github.com\/users\/diego-fustes\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/diego-fustes\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-05T15:43:20Z","updated_at":"2021-04-06T15:02:58Z","closed_at":"2021-04-06T15:02:58Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2169","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2169","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2169.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2169.patch","merged_at":null},"body":"This is in order to fix this issue:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/issues\/2078\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2169\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2169\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2168","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2168\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2168\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2168\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2168","id":849957941,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA4NjA4Nzg5","number":2168,"title":"Preserve split type when realoding 
dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-04-04T20:46:21Z","updated_at":"2021-04-19T10:57:05Z","closed_at":"2021-04-19T09:08:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2168","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2168","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2168.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2168.patch","merged_at":"2021-04-19T09:08:55Z"},"body":"Fixes #2167 \r\n\r\nUsing `eval` is not ideal for security reasons (in web apps I assume), but without it the code would be much more complex IMO.\r\n\r\nIn terms of style, instead of explicitly importing a private member (`_RelativeInstruction`), we can add these imports at the top of the module:\r\n```python\r\nfrom . import arrow_reader # gives us access to ReadInstruction and _RelativeInstruction\r\nfrom . 
import splits # gives us access to NamedSplit\r\n```\r\n\r\nand then define the `eval` globals as follows:\r\n```python\r\n{**arrow_reader.__dict__, **splits.__dict__}\r\n```\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2168\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2168\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2167","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2167\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2167\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2167\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2167","id":849944891,"node_id":"MDU6SXNzdWU4NDk5NDQ4OTE=","number":2167,"title":" Split type not preserved when reloading the dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-04T19:29:54Z","updated_at":"2021-04-19T09:08:55Z","closed_at":"2021-04-19T09:08:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"A minimal reproducible example:\r\n```python\r\n>>> from datasets import load_dataset, Dataset\r\n>>> dset = load_dataset(\"sst\", split=\"train\")\r\n>>> dset.save_to_disk(\"sst\")\r\n>>> type(dset.split)\r\n\r\n>>> dset = Dataset.load_from_disk(\"sst\")\r\n>>> type(dset.split) # NamedSplit expected\r\n\r\n```\r\n\r\nIt seems like this bug was introduced in #2025.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2167\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2167\/timeline","performed_via_github_app":null} 
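Editor's note on issue #2176 above, which asks for an example of a string <-> ClassLabel conversion with `map()`: the sketch below is one minimal way to do it on a toy two-class dataset, using `map()` to turn the strings into integer ids and `cast()` to update the schema. The dataset, column names, and class names are assumptions for illustration, not the recipe the maintainers may have recommended in the thread.

```python
from datasets import ClassLabel, Dataset, Features, Value

# Toy dataset with a string "label" column (assumed two classes).
dset = Dataset.from_dict({"text": ["good movie", "bad movie"], "label": ["pos", "neg"]})

class_label = ClassLabel(names=["neg", "pos"])

# 1) map() converts the string labels into integer ids...
dset = dset.map(lambda ex: {"label": class_label.str2int(ex["label"])})

# 2) ...then cast() updates the features so the column is a real ClassLabel.
dset = dset.cast(Features({"text": Value("string"), "label": class_label}))

print(dset.features["label"])  # a ClassLabel feature with names ['neg', 'pos']
```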
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2166","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2166\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2166\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2166\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2166","id":849778545,"node_id":"MDU6SXNzdWU4NDk3Nzg1NDU=","number":2166,"title":"Regarding Test Sets for the GEM datasets","user":{"login":"vyraun","id":17217068,"node_id":"MDQ6VXNlcjE3MjE3MDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17217068?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vyraun","html_url":"https:\/\/github.com\/vyraun","followers_url":"https:\/\/api.github.com\/users\/vyraun\/followers","following_url":"https:\/\/api.github.com\/users\/vyraun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vyraun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vyraun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vyraun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vyraun\/orgs","repos_url":"https:\/\/api.github.com\/users\/vyraun\/repos","events_url":"https:\/\/api.github.com\/users\/vyraun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vyraun\/received_events","type":"User","site_admin":false},"labels":[{"id":2067401494,"node_id":"MDU6TGFiZWwyMDY3NDAxNDk0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Dataset%20discussion","name":"Dataset discussion","color":"72f99f","default":false,"description":"Discussions on the datasets"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-04T02:02:45Z","updated_at":"2021-04-06T08:13:12Z","closed_at":"2021-04-06T08:13:12Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"@yjernite Hi, are the test sets for the GEM datasets scheduled to be [added soon](https:\/\/gem-benchmark.com\/shared_task)? 
\r\n\r\ne.g.\r\n\r\n```\r\nfrom datasets import load_dataset\r\nDATASET_NAME=\"common_gen\"\r\ndata = load_dataset(\"gem\", DATASET_NAME)\r\n```\r\n\r\nThe test set doesn't have the target or references.\r\n\r\n```\r\ndata['test'][0]\r\n{'concept_set_id': 0, 'concepts': ['drill', 'field', 'run', 'team'], 'gem_id': 'common_gen-test-0', 'gem_parent_id': 'common_gen-test-0', 'references': [], 'target': ''}\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2166\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2166\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2165","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2165\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2165\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2165\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2165","id":849771665,"node_id":"MDU6SXNzdWU4NDk3NzE2NjU=","number":2165,"title":"How to convert datasets.arrow_dataset.Dataset to torch.utils.data.Dataset","user":{"login":"y-rokutan","id":24562381,"node_id":"MDQ6VXNlcjI0NTYyMzgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24562381?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/y-rokutan","html_url":"https:\/\/github.com\/y-rokutan","followers_url":"https:\/\/api.github.com\/users\/y-rokutan\/followers","following_url":"https:\/\/api.github.com\/users\/y-rokutan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/y-rokutan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/y-rokutan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/y-rokutan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/y-rokutan\/orgs","repos_url":"https:\/\/api.github.com\/users\/y-rokutan\/repos","events_url":"https:\/\/api.github.com\/users\/y-rokutan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/y-rokutan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-04-04T01:01:48Z","updated_at":"2021-08-24T15:55:35Z","closed_at":"2021-04-07T15:06:04Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI'm trying to pretraine deep-speed model using HF arxiv dataset like:\r\n```\r\ntrain_ds = nlp.load_dataset('scientific_papers', 'arxiv')\r\ntrain_ds.set_format(\r\n type=\"torch\",\r\n columns=[\"input_ids\", \"attention_mask\", \"global_attention_mask\", \"labels\"],\r\n )\r\nengine, _, _, _ = deepspeed.initialize(\r\n args=args,\r\n model=model,\r\n model_parameters=[p for p in model.parameters() if p.requires_grad],\r\n training_data=train_ds)\r\n```\r\nbut deepspeed.initialize accepts torch.utils.data.Dataset only. 
How can I convert HF-style dataset to torch-style dataset?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2165\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2165\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2164","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2164\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2164\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2164\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2164","id":849739759,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA4NDQ0MTE3","number":2164,"title":"Replace assertTrue(isinstance with assertIsInstance in tests","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-03T21:07:02Z","updated_at":"2021-04-06T14:41:09Z","closed_at":"2021-04-06T14:41:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2164","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2164","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2164.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2164.patch","merged_at":"2021-04-06T14:41:08Z"},"body":"Replaces all the occurrences of the `assertTrue(isinstance(` pattern with `assertIsInstance`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2164\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2164\/timeline","performed_via_github_app":null} 
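For the question in #2165 above, a thin adapter is usually enough when an API insists on a `torch.utils.data.Dataset`; the sketch below is a minimal, hypothetical wrapper (the `HFTorchWrapper` name is invented here, and newer `datasets` releases may make it unnecessary once `set_format(type="torch", ...)` has been applied):

```python
from torch.utils.data import Dataset as TorchDataset


class HFTorchWrapper(TorchDataset):
    """Expose a Hugging Face datasets.Dataset through the torch Dataset interface."""

    def __init__(self, hf_dataset):
        # hf_dataset is expected to already be in "torch" format
        self.hf_dataset = hf_dataset

    def __len__(self):
        return len(self.hf_dataset)

    def __getitem__(self, idx):
        # With set_format(type="torch"), indexing returns a dict of tensors.
        return self.hf_dataset[idx]
```

Passing `HFTorchWrapper(train_ds)` as the `training_data` argument shown in the snippet above would then satisfy the expected type.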
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2163","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2163\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2163\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2163\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2163","id":849669366,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA4Mzk0NDMz","number":2163,"title":"Concat only unique fields in DatasetInfo.from_merge","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-03T14:31:30Z","updated_at":"2021-04-06T14:40:00Z","closed_at":"2021-04-06T14:39:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2163","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2163","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2163.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2163.patch","merged_at":"2021-04-06T14:39:59Z"},"body":"I thought someone from the community with less experience would be interested in fixing this issue, but that wasn't the case.\r\n\r\nFixes #2103 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2163\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2163\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2162","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2162\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2162\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2162\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2162","id":849129201,"node_id":"MDU6SXNzdWU4NDkxMjkyMDE=","number":2162,"title":"visualization for cc100 is broken 
","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-02T10:11:13Z","updated_at":"2021-04-07T13:01:07Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nvisualization through dataset viewer for cc100 is broken\r\nhttps:\/\/huggingface.co\/datasets\/viewer\/\r\n\r\nthanks a lot\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2162\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2162\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2161","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2161\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2161\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2161\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2161","id":849127041,"node_id":"MDU6SXNzdWU4NDkxMjcwNDE=","number":2161,"title":"any possibility to download part of large datasets 
only?","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-04-02T10:06:46Z","updated_at":"2021-07-02T15:19:29Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nSome of the datasets I need like cc100 are very large, and then I wonder if I can download first X samples of the shuffled\/unshuffled data without going through first downloading the whole data then sampling? thanks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2161\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2161\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2160","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2160\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2160\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2160\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2160","id":849052921,"node_id":"MDU6SXNzdWU4NDkwNTI5MjE=","number":2160,"title":"data_args.preprocessing_num_workers almost freezes 
","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-02T07:56:13Z","updated_at":"2021-04-02T10:14:32Z","closed_at":"2021-04-02T10:14:31Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi @lhoestq \r\n\r\nI am running this code from huggingface transformers https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/language-modeling\/run_mlm.py \r\n\r\nto speed up tokenization, since I am running on multiple datasets, I am using data_args.preprocessing_num_workers = 4 with opus100 corpus but this moves on till a point and then this freezes almost for sometime during tokenization steps and then this is back again, overall to me taking more time than normal case, I appreciate your advice on how I can use this option properly to speed up.\r\n\r\nthanks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2160\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2160\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2159","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2159\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2159\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2159\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2159","id":848851962,"node_id":"MDU6SXNzdWU4NDg4NTE5NjI=","number":2159,"title":"adding ccnet 
dataset","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-01T23:28:36Z","updated_at":"2021-04-02T10:05:19Z","closed_at":"2021-04-02T10:05:19Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** ccnet\r\n\r\n- **Description:** \r\nCommon Crawl\r\n\r\n- **Paper:** \r\nhttps:\/\/arxiv.org\/abs\/1911.00359\r\n\r\n- **Data:** \r\nhttps:\/\/github.com\/facebookresearch\/cc_net\r\n\r\n- **Motivation:**\r\nthis is one of the most comprehensive clean monolingual datasets across a variety of languages. 
Quite important for cross-lingual reseach\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nthanks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2159\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2159\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2158","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2158\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2158\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2158\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2158","id":848506746,"node_id":"MDU6SXNzdWU4NDg1MDY3NDY=","number":2158,"title":"viewer \"fake_news_english\" error","user":{"login":"emanuelevivoli","id":9447991,"node_id":"MDQ6VXNlcjk0NDc5OTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9447991?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/emanuelevivoli","html_url":"https:\/\/github.com\/emanuelevivoli","followers_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/followers","following_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/orgs","repos_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/repos","events_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-01T14:13:20Z","updated_at":"2021-04-07T10:26:09Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I visit the [Huggingface - viewer](https:\/\/huggingface.co\/datasets\/viewer\/) web site, under the dataset \"fake_news_english\" I've got this error:\r\n\r\n> ImportError: To be able to use this dataset, you need to install the following dependencies['openpyxl'] using 'pip install # noqa: requires this pandas optional dependency for reading xlsx files' for instance'\r\n\r\nas well as the error Traceback.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2158\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2158\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2157","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2157\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2157\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2157\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2157","id":847205239,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA2MjM1NjUx","number":2157,"title":"updated user permissions based on umask","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-31T19:38:29Z","updated_at":"2021-04-06T07:19:19Z","closed_at":"2021-04-06T07:19:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2157","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2157","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2157.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2157.patch","merged_at":"2021-04-06T07:19:19Z"},"body":"Updated user permissions based on running user's umask (#2065). 
Let me know if `0o666` is looking good or should I change it to `~umask` only (to give execute permissions as well) ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2157\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2157\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2156","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2156\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2156\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2156\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2156","id":847198295,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA2MjI5MTky","number":2156,"title":"User permissions","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-31T19:33:48Z","updated_at":"2021-03-31T19:34:24Z","closed_at":"2021-03-31T19:34:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2156","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2156","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2156.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2156.patch","merged_at":null},"body":"Updated user permissions based on running user's umask. 
Let me know if `0o666` is looking good or should I change it to `~umask` only (to give execute permissions as well)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2156\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2156\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2155","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2155\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2155\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2155\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2155","id":846786897,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA1ODU3MTU4","number":2155,"title":"Add table classes to the documentation","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-31T14:36:10Z","updated_at":"2021-04-01T16:46:30Z","closed_at":"2021-03-31T15:42:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2155","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2155","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2155.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2155.patch","merged_at":"2021-03-31T15:42:08Z"},"body":"Following #2025 , I added the table classes to the documentation\r\n\r\ncc @albertvillanova ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2155\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2155\/timeline","performed_via_github_app":null} 
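The umask handling discussed in #2156/#2157 above can be pictured with a short, self-contained sketch (illustrative only, not the PR's actual diff; the temporary file is just there to make the example runnable):

```python
import os
import tempfile

# os.umask() sets a new mask and returns the previous one, so read it by
# setting a dummy value and restoring the original immediately.
current_umask = os.umask(0o022)
os.umask(current_umask)

# Apply the mask to the 0o666 base mode mentioned in the PR description
# (no execute bits); with the common 0o022 umask this yields 0o644.
mode = 0o666 & ~current_umask

with tempfile.NamedTemporaryFile(delete=False) as tmp:
    path = tmp.name
os.chmod(path, mode)
print(oct(os.stat(path).st_mode & 0o777))
os.remove(path)
```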
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2154","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2154\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2154\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2154\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2154","id":846763960,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA1ODM2Mjc1","number":2154,"title":"Adding the NorNE dataset for Norwegian POS and NER","user":{"login":"versae","id":173537,"node_id":"MDQ6VXNlcjE3MzUzNw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/173537?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/versae","html_url":"https:\/\/github.com\/versae","followers_url":"https:\/\/api.github.com\/users\/versae\/followers","following_url":"https:\/\/api.github.com\/users\/versae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/versae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/versae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/versae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/versae\/orgs","repos_url":"https:\/\/api.github.com\/users\/versae\/repos","events_url":"https:\/\/api.github.com\/users\/versae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/versae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-31T14:22:50Z","updated_at":"2021-04-01T09:27:00Z","closed_at":"2021-04-01T09:16:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2154","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2154","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2154.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2154.patch","merged_at":"2021-04-01T09:16:08Z"},"body":"NorNE is a manually annotated corpus of named entities which extends the annotation of the existing Norwegian Dependency Treebank. 
Comprising both of the official standards of written Norwegian (Bokm\u00e5l and Nynorsk), the corpus contains around 600,000 tokens and annotates a rich set of entity types including persons, organizations, locations, geo-political entities, products, and events, in addition to a class corresponding to nominals derived from names.\r\n\r\nSee #1720.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2154\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2154\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2153","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2153\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2153\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2153\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2153","id":846181502,"node_id":"MDU6SXNzdWU4NDYxODE1MDI=","number":2153,"title":"load_dataset ignoring features","user":{"login":"GuillemGSubies","id":37592763,"node_id":"MDQ6VXNlcjM3NTkyNzYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37592763?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/GuillemGSubies","html_url":"https:\/\/github.com\/GuillemGSubies","followers_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/followers","following_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/orgs","repos_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/repos","events_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-03-31T08:30:09Z","updated_at":"2021-08-27T15:23:58Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"First of all, I'm sorry if it is a repeated issue or the changes are already in master, I searched and I didn't find anything. \r\n\r\nI'm using datasets 1.5.0\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/37592763\/113114369-8f376580-920b-11eb-900d-94365b59f04b.png)\r\n\r\nAs you can see, when I load the dataset, the ClassLabels are ignored, I have to cast the dataset in order to make it work.\r\n\r\nCode to reproduce:\r\n\r\n```python\r\nimport datasets\r\ndata_location = \"\/data\/prueba_multiclase\"\r\nfeatures = datasets.Features(\r\n {\"texto\": datasets.Value(\"string\"), \"label\": datasets.features.ClassLabel(names=[\"false\", \"true\"])}\r\n )\r\ndataset = datasets.load_dataset(\r\n \"csv\", data_files=data_location, delimiter=\"\\t\", features=features\r\n )\r\n```\r\n\r\nDataset I used:\r\n\r\n\r\n[prueba_multiclase.zip](https:\/\/github.com\/huggingface\/datasets\/files\/6235022\/prueba_multiclase.zip) (it has to be unzipped)\r\n\r\n\r\nThank you! 
\u2764\ufe0f \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2153\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2153\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2152","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2152\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2152\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2152\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2152","id":845751273,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA0ODk0MDkz","number":2152,"title":"Update README.md","user":{"login":"JieyuZhao","id":22306304,"node_id":"MDQ6VXNlcjIyMzA2MzA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22306304?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JieyuZhao","html_url":"https:\/\/github.com\/JieyuZhao","followers_url":"https:\/\/api.github.com\/users\/JieyuZhao\/followers","following_url":"https:\/\/api.github.com\/users\/JieyuZhao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JieyuZhao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JieyuZhao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JieyuZhao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JieyuZhao\/orgs","repos_url":"https:\/\/api.github.com\/users\/JieyuZhao\/repos","events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-31T03:21:19Z","updated_at":"2021-04-01T10:20:37Z","closed_at":"2021-04-01T10:20:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2152","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2152","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2152.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2152.patch","merged_at":"2021-04-01T10:20:36Z"},"body":"Updated some descriptions of Wino_Bias dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2152\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2152\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2151","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2151\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2151\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2151\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2151","id":844886081,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA0MDg5MDMw","number":2151,"title":"Add 
support for axis in concatenate datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/1","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1\/labels","id":6644198,"node_id":"MDk6TWlsZXN0b25lNjY0NDE5OA==","number":1,"title":"1.6","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":4,"state":"closed","created_at":"2021-04-09T13:07:51Z","updated_at":"2021-04-20T16:50:46Z","due_on":"2021-04-16T07:00:00Z","closed_at":"2021-04-20T16:50:46Z"},"comments":5,"created_at":"2021-03-30T16:58:44Z","updated_at":"2021-06-23T17:41:02Z","closed_at":"2021-04-19T16:07:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2151","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2151","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2151.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2151.patch","merged_at":"2021-04-19T16:0
7:18Z"},"body":"Add support for `axis` (0 or 1) in `concatenate_datasets`.\r\n\r\nClose #853.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2151\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2151\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2150","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2150\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2150\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2150\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2150","id":844776448,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzOTg3OTcx","number":2150,"title":"Allow pickling of big in-memory tables","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-30T15:51:56Z","updated_at":"2021-03-31T10:37:15Z","closed_at":"2021-03-31T10:37:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2150","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2150","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2150.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2150.patch","merged_at":"2021-03-31T10:37:14Z"},"body":"This should fix issue #2134 \r\n\r\nPickling is limited to <4GiB objects, it's not possible to pickle a big arrow table (for multiprocessing for example).\r\nFor big tables, we have to write them on disk and only pickle the path to the table.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2150\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2150\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2149","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2149\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2149\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2149\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2149","id":844734076,"node_id":"MDU6SXNzdWU4NDQ3MzQwNzY=","number":2149,"title":"Telugu subset missing for xtreme tatoeba dataset","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-30T15:26:34Z","updated_at":"2021-04-07T10:23:35Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"from nlp import load_dataset\r\ntrain_dataset = load_dataset('xtreme', 'tatoeba.tel')['validation']\r\nValueError: BuilderConfig tatoeba.tel not found.\r\n\r\nbut language tel is actually included in xtreme:\r\nhttps:\/\/github.com\/google-research\/xtreme\/blob\/master\/utils_preprocess.py\r\ndef tatoeba_preprocess(args):\r\n lang3_dict = {\r\n 'afr':'af', 'ara':'ar', 'bul':'bg', 'ben':'bn',\r\n 'deu':'de', 'ell':'el', 'spa':'es', 'est':'et',\r\n 'eus':'eu', 'pes':'fa', 'fin':'fi', 'fra':'fr',\r\n 'heb':'he', 'hin':'hi', 'hun':'hu', 'ind':'id',\r\n 'ita':'it', 'jpn':'ja', 'jav':'jv', 'kat':'ka',\r\n 'kaz':'kk', 'kor':'ko', 'mal':'ml', 'mar':'mr',\r\n 'nld':'nl', 'por':'pt', 'rus':'ru', 'swh':'sw',\r\n 'tam':'ta', **_'tel':'te'_**, 'tha':'th', 'tgl':'tl', <----here\r\n 'tur':'tr', 'urd':'ur', 'vie':'vi', 'cmn':'zh',\r\n 'eng':'en',\r\n }","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2149\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2149\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2148","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2148\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2148\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2148\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2148","id":844700910,"node_id":"MDU6SXNzdWU4NDQ3MDA5MTA=","number":2148,"title":"Add configurable options to `seqeval` metric","user":{"login":"marrodion","id":44571847,"node_id":"MDQ6VXNlcjQ0NTcxODQ3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44571847?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/marrodion","html_url":"https:\/\/github.com\/marrodion","followers_url":"https:\/\/api.github.com\/users\/marrodion\/followers","following_url":"https:\/\/api.github.com\/users\/marrodion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/marrodion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/marrodion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/marrodion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/marrodion\/orgs","repos_url":"https:\/\/api.github.com\/users\/marrodion\/repos","events_url":"https:\/\/api.github.com\/users\/marrodion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/marrodion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-30T15:04:06Z","updated_at":"2021-04-15T13:49:46Z","closed_at":"2021-04-15T13:49:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Right now `load_metric(\"seqeval\")` only works in the default mode of evaluation (equivalent to conll evaluation).\r\n\r\nHowever, seqeval library [supports](https:\/\/github.com\/chakki-works\/seqeval#support-features) different evaluation schemes (IOB1, IOB2, etc.), which can be plugged in just by supporting additional kwargs in `Seqeval._compute`\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/85cf7ff920c90ca2e12bedca12b36d2a043c3da2\/metrics\/seqeval\/seqeval.py#L109\r\n\r\nThings that would be relevant are, for example, supporting `mode=\"strict\", scheme=IOB2` to count only full entity match as a true positive and omit partial matches.\r\n\r\nThe only problem I see is that the spirit of `metrics` seems to not require additional imports from user. `seqeval` only supports schemes as objects, without any string aliases. \r\n\r\nIt can be solved naively with mapping like `{\"IOB2\": seqeval.scheme.IOB2}`. 
Or just left as is and require user to explicitly import scheme from `seqeval` if he wants to configure it past the default implementation.\r\n\r\nIf that makes sense, I am happy to implement the change.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2148\/reactions","total_count":2,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2148\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2147","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2147\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2147\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2147\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2147","id":844687831,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzOTA3NjM4","number":2147,"title":"Render docstring return type as inline","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-30T14:55:43Z","updated_at":"2021-03-31T13:11:05Z","closed_at":"2021-03-31T13:11:05Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2147","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2147","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2147.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2147.patch","merged_at":"2021-03-31T13:11:05Z"},"body":"This documentation setting will avoid having the return type in a separate line under `Return type`. \r\n\r\nSee e.g. 
current docs for `Dataset.to_csv`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2147\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2147\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2146","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2146\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2146\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2146\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2146","id":844673244,"node_id":"MDU6SXNzdWU4NDQ2NzMyNDQ=","number":2146,"title":"Dataset file size on disk is very large with 3D Array","user":{"login":"jblemoine","id":22685854,"node_id":"MDQ6VXNlcjIyNjg1ODU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22685854?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jblemoine","html_url":"https:\/\/github.com\/jblemoine","followers_url":"https:\/\/api.github.com\/users\/jblemoine\/followers","following_url":"https:\/\/api.github.com\/users\/jblemoine\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jblemoine\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jblemoine\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jblemoine\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jblemoine\/orgs","repos_url":"https:\/\/api.github.com\/users\/jblemoine\/repos","events_url":"https:\/\/api.github.com\/users\/jblemoine\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jblemoine\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-30T14:46:09Z","updated_at":"2021-04-16T13:07:02Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, \r\n\r\nI have created my own dataset using the provided dataset loading script. It is an image dataset where images are stored as 3D Array with dtype=uint8. \r\n\r\nThe actual size on disk is surprisingly large. It takes 520 MB. Here is some info from `dataset_info.json`. 
\r\n\r\n`{\r\n \"description\": \"\",\r\n \"citation\": \"\",\r\n \"homepage\": \"\",\r\n \"license\": \"\",\r\n \"features\": {\r\n \"image\": {\r\n \"shape\": [224, 224, 3],\r\n \"dtype\": \"uint8\",\r\n \"id\": null,\r\n \"_type\": \"Array3D\",\r\n }\r\n },\r\n \"post_processed\": null,\r\n \"supervised_keys\": null,\r\n \"builder_name\": \"shot_type_image_dataset\",\r\n \"config_name\": \"default\",\r\n \"version\": {\r\n \"version_str\": \"0.0.0\",\r\n \"description\": null,\r\n \"major\": 0,\r\n \"minor\": 0,\r\n \"patch\": 0,\r\n },\r\n \"splits\": {\r\n \"train\": {\r\n \"name\": \"train\",\r\n \"num_bytes\": 520803408,\r\n \"num_examples\": 1479,\r\n \"dataset_name\": \"shot_type_image_dataset\",\r\n }\r\n },\r\n \"download_checksums\": {\r\n \"\": {\r\n \"num_bytes\": 16940447118,\r\n \"checksum\": \"5854035705efe08b0ed8f3cf3da7b4d29cba9055c2d2d702c79785350d72ee03\",\r\n }\r\n },\r\n \"download_size\": 16940447118,\r\n \"post_processing_size\": null,\r\n \"dataset_size\": 520803408,\r\n \"size_in_bytes\": 17461250526,\r\n}`\r\n\r\nI have created the same dataset with tensorflow_dataset and it takes only 125MB on disk.\r\n\r\nI am wondering, is it normal behavior ? I understand `Datasets` uses Arrow for serialization wheres tf uses TF Records.\r\n\r\nThis might be a problem for large dataset. \r\n\r\nThanks for your help. \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2146\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2146\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2145","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2145\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2145\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2145\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2145","id":844603518,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzODMxOTE2","number":2145,"title":"Implement Dataset 
add_column","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/3","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3\/labels","id":6644287,"node_id":"MDk6TWlsZXN0b25lNjY0NDI4Nw==","number":3,"title":"1.7","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":3,"state":"closed","created_at":"2021-04-09T13:16:31Z","updated_at":"2021-05-31T16:20:53Z","due_on":"2021-05-14T07:00:00Z","closed_at":"2021-05-31T16:20:53Z"},"comments":1,"created_at":"2021-03-30T14:02:14Z","updated_at":"2021-04-29T14:50:44Z","closed_at":"2021-04-29T14:50:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2145","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2145","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2145.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2145.patch","merged_at":"2021-04-29T14:50:43Z"},"body":"Implement 
`Dataset.add_column`.\r\n\r\nClose #1954.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2145\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2145\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2144","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2144\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2144\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2144\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2144","id":844352067,"node_id":"MDU6SXNzdWU4NDQzNTIwNjc=","number":2144,"title":"Loading wikipedia 20200501.en throws pyarrow related error","user":{"login":"TomPyonsuke","id":26637405,"node_id":"MDQ6VXNlcjI2NjM3NDA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26637405?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TomPyonsuke","html_url":"https:\/\/github.com\/TomPyonsuke","followers_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/followers","following_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/orgs","repos_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/repos","events_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-30T10:38:31Z","updated_at":"2021-04-01T09:21:17Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Problem description**\r\nI am getting the following error when trying to load wikipedia\/20200501.en dataset.\r\n\r\n**Error log**\r\nDownloading and preparing dataset wikipedia\/20200501.en (download: 16.99 GiB, generated: 17.07 GiB, post-processed: Unknown size, total: 34.06 GiB) to \/usr\/local\/workspace\/NAS_NLP\/cache\/wikipedia\/20200501.en\/1.0.0\/50aa706aa417bb77d910ad61211cc672c0ef3e0f224225a5e0a18277ade8b931...\r\nDownloading: 
100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 14.6k\/14.6k [00:00<00:00, 5.41MB\/s]\r\nDownloading: 59%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 10.7G\/18.3G [11:30<08:08, 15.5MB\/s]\r\nDataset wikipedia downloaded and prepared to \/usr\/local\/workspace\/NAS_NLP\/cache\/wikipedia\/20200501.en\/1.0.0\/50aa706aa417bb77d910ad61211cc672c0ef3e0f224225a5e0a18277ade8b931. Subsequent calls will reuse this data.\r\nTraceback (most recent call last):\r\n File \"load_wiki.py\", line 2, in \r\n ds = load_dataset('wikipedia', '20200501.en', cache_dir='\/usr\/local\/workspace\/NAS_NLP\/cache')\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py\", line 751, in load_dataset\r\n ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications, in_memory=keep_in_memory)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/builder.py\", line 746, in as_dataset\r\n map_tuple=True,\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/py_utils.py\", line 204, in map_nested\r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/py_utils.py\", line 204, in \r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/py_utils.py\", line 142, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/builder.py\", line 763, in _build_single_dataset\r\n in_memory=in_memory,\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/builder.py\", line 835, in _as_dataset\r\n in_memory=in_memory,\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_reader.py\", line 215, in read\r\n return self.read_files(files=files, original_instructions=instructions, in_memory=in_memory)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_reader.py\", line 236, in read_files\r\n pa_table = self._read_files(files, in_memory=in_memory)\r\n File 
\"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_reader.py\", line 171, in _read_files\r\n pa_table: pa.Table = self._get_dataset_from_filename(f_dict, in_memory=in_memory)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_reader.py\", line 302, in _get_dataset_from_filename\r\n pa_table = ArrowReader.read_table(filename, in_memory=in_memory)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_reader.py\", line 324, in read_table\r\n pa_table = f.read_all()\r\n File \"pyarrow\/ipc.pxi\", line 544, in pyarrow.lib.RecordBatchReader.read_all\r\n File \"pyarrow\/error.pxi\", line 99, in pyarrow.lib.check_status\r\nOSError: Expected to be able to read 9176784 bytes for message body, got 4918712\r\n\r\n**Detailed version info**\r\ndatasets==1.5.0\r\n - dataclasses [required: Any, installed: 0.8]\r\n - dill [required: Any, installed: 0.3.3]\r\n - fsspec [required: Any, installed: 0.8.7]\r\n - importlib-metadata [required: Any, installed: 1.7.0]\r\n - zipp [required: >=0.5, installed: 3.1.0]\r\n - huggingface-hub [required: <0.1.0, installed: 0.0.7]\r\n - filelock [required: Any, installed: 3.0.12]\r\n - importlib-metadata [required: Any, installed: 1.7.0]\r\n - zipp [required: >=0.5, installed: 3.1.0]\r\n - requests [required: Any, installed: 2.24.0]\r\n - certifi [required: >=2017.4.17, installed: 2020.6.20]\r\n - chardet [required: >=3.0.2,<4, installed: 3.0.4]\r\n - idna [required: >=2.5,<3, installed: 2.6]\r\n - urllib3 [required: >=1.21.1,<1.26,!=1.25.1,!=1.25.0, installed: 1.25.10]\r\n - tqdm [required: Any, installed: 4.49.0]\r\n - importlib-metadata [required: Any, installed: 1.7.0]\r\n - zipp [required: >=0.5, installed: 3.1.0]\r\n - multiprocess [required: Any, installed: 0.70.11.1]\r\n - dill [required: >=0.3.3, installed: 0.3.3]\r\n - numpy [required: >=1.17, installed: 1.17.0]\r\n - pandas [required: Any, installed: 1.1.5]\r\n - numpy [required: >=1.15.4, installed: 1.17.0]\r\n - python-dateutil [required: >=2.7.3, installed: 2.8.0]\r\n - six [required: >=1.5, installed: 1.15.0]\r\n - pytz [required: >=2017.2, installed: 2020.1]\r\n - pyarrow [required: >=0.17.1, installed: 3.0.0]\r\n - numpy [required: >=1.16.6, installed: 1.17.0]\r\n - requests [required: >=2.19.0, installed: 2.24.0]\r\n - certifi [required: >=2017.4.17, installed: 2020.6.20]\r\n - chardet [required: >=3.0.2,<4, installed: 3.0.4]\r\n - idna [required: >=2.5,<3, installed: 2.6]\r\n - urllib3 [required: >=1.21.1,<1.26,!=1.25.1,!=1.25.0, installed: 1.25.10]\r\n - tqdm [required: >=4.27,<4.50.0, installed: 4.49.0]\r\n - xxhash [required: Any, installed: 2.0.0]\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2144\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2144\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2143","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2143\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2143\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2143\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2143","id":844313228,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzNTc0NjI0","number":2143,"title":"task casting via load_dataset","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"assignees":[{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},{"login":"SBrandeis","
id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-03-30T10:00:42Z","updated_at":"2021-06-11T13:20:41Z","closed_at":"2021-06-11T13:20:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2143","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2143","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2143.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2143.patch","merged_at":null},"body":"wip\r\nnot satisfied with the API, it means as a dataset implementer I need to write a function with boilerplate and write classes for each `` \"facet\".","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2143\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2143\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2142","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2142\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2142\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2142\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2142","id":843919420,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzMjQwMzUy","number":2142,"title":"Gem 
V1.1","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-29T23:47:02Z","updated_at":"2021-03-30T00:10:02Z","closed_at":"2021-03-30T00:10:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2142","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2142","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2142.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2142.patch","merged_at":"2021-03-30T00:10:02Z"},"body":"This branch updates the GEM benchmark to its 1.1 version which includes:\r\n- challenge sets for most tasks\r\n- detokenized TurkCorpus to match the rest of the text simplification subtasks\r\n- fixed inputs for TurkCorpus and ASSET test sets\r\n- 18 languages in WikiLingua\r\n\r\ncc @sebastianGehrmann","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2142\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2142\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2141","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2141\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2141\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2141\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2141","id":843914790,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzMjM2MjUw","number":2141,"title":"added spans field for the wikiann 
datasets","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-29T23:38:26Z","updated_at":"2021-03-31T13:27:50Z","closed_at":"2021-03-31T13:27:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2141","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2141","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2141.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2141.patch","merged_at":"2021-03-31T13:27:50Z"},"body":"Hi @lhoestq \r\nI tried to add spans to the wikiann datasets.\r\nThanks a lot for kindly having a look.\r\nThis addresses https:\/\/github.com\/huggingface\/datasets\/issues\/2130. 
\r\nBest regards\r\nRabeeh ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2141\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2141\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2140","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2140\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2140\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2140\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2140","id":843830451,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzMTYxMjYx","number":2140,"title":"add banking77 dataset","user":{"login":"dkajtoch","id":32985207,"node_id":"MDQ6VXNlcjMyOTg1MjA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32985207?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dkajtoch","html_url":"https:\/\/github.com\/dkajtoch","followers_url":"https:\/\/api.github.com\/users\/dkajtoch\/followers","following_url":"https:\/\/api.github.com\/users\/dkajtoch\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dkajtoch\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dkajtoch\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dkajtoch\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dkajtoch\/orgs","repos_url":"https:\/\/api.github.com\/users\/dkajtoch\/repos","events_url":"https:\/\/api.github.com\/users\/dkajtoch\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dkajtoch\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-29T21:32:23Z","updated_at":"2021-04-09T09:32:18Z","closed_at":"2021-04-09T09:32:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2140","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2140","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2140.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2140.patch","merged_at":"2021-04-09T09:32:18Z"},"body":"Intent classification\/detection dataset from banking category with 77 unique intents.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2140\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2140\/timeline","performed_via_github_app":null} 
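As a usage note for the banking77 addition just above: once the loader is merged, a sketch along these lines would load the data and confirm the 77 intents. The dataset name `banking77` and the `ClassLabel` column called `label` are assumptions here; only the 77-intent count comes from the PR description.

```python
# Hedged sketch: load the intent-classification dataset added in the PR above
# and inspect its label space. Names ("banking77", "label") are assumptions.
from datasets import load_dataset

banking = load_dataset("banking77")

label_names = banking["train"].features["label"].names
print(len(label_names), "unique intents")   # expected: 77
print(banking["train"][0])                  # e.g. {'text': ..., 'label': ...}
```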
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2139","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2139\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2139\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2139\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2139","id":843662613,"node_id":"MDU6SXNzdWU4NDM2NjI2MTM=","number":2139,"title":"TypeError when using save_to_disk in a dataset loaded with ReadInstruction split","user":{"login":"PedroMLF","id":22480495,"node_id":"MDQ6VXNlcjIyNDgwNDk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22480495?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PedroMLF","html_url":"https:\/\/github.com\/PedroMLF","followers_url":"https:\/\/api.github.com\/users\/PedroMLF\/followers","following_url":"https:\/\/api.github.com\/users\/PedroMLF\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PedroMLF\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PedroMLF\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PedroMLF\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PedroMLF\/orgs","repos_url":"https:\/\/api.github.com\/users\/PedroMLF\/repos","events_url":"https:\/\/api.github.com\/users\/PedroMLF\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PedroMLF\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-29T18:23:54Z","updated_at":"2021-03-30T09:12:53Z","closed_at":"2021-03-30T09:12:53Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nLoading a dataset with `load_dataset` using a split defined via `ReadInstruction` and then saving it to disk results in the following error: `TypeError: Object of type ReadInstruction is not JSON serializable`.\r\n\r\nHere is the minimal reproducible example:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom datasets import ReadInstruction\r\n\r\ndata_1 = load_dataset(\r\n \"wikiann\",\r\n \"en\",\r\n split=\"validation\",\r\n)\r\n\r\ndata_1.save_to_disk(\"temporary_path_1\")\r\n\r\nprint(\"Save with regular split works.\")\r\n\r\ndata_2 = load_dataset(\r\n \"wikiann\",\r\n \"en\",\r\n split=ReadInstruction(\"validation\", to=50, unit=\"%\"),\r\n)\r\n\r\ndata_2.save_to_disk(\"temporary_path_2\")\r\n```\r\n\r\nand the corresponding output:\r\n\r\n```\r\nReusing dataset wikiann (\/xxxxx\/.cache\/huggingface\/datasets\/wikiann\/en\/1.1.0\/0b11a6fb31eea02f38ca17610657bfba3206100685283014daceb8da291c3be9)\r\nSave with regular split works.\r\nReusing dataset wikiann (\/xxxxx\/.cache\/huggingface\/datasets\/wikiann\/en\/1.1.0\/0b11a6fb31eea02f38ca17610657bfba3206100685283014daceb8da291c3be9)\r\nTraceback (most recent call last):\r\n File \"bug.py\", line 20, in \r\n data_2.save_to_disk(\"temporary_path_2\")\r\n File \"\/xxxxx\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 645, in save_to_disk\r\n json.dump(state, state_file, indent=2, sort_keys=True)\r\n File \"\/usr\/lib\/python3.7\/json\/__init__.py\", line 179, in dump\r\n for chunk in iterable:\r\n File \"\/usr\/lib\/python3.7\/json\/encoder.py\", line 431, in _iterencode\r\n yield from 
_iterencode_dict(o, _current_indent_level)\r\n File \"\/usr\/lib\/python3.7\/json\/encoder.py\", line 405, in _iterencode_dict\r\n yield from chunks\r\n File \"\/usr\/lib\/python3.7\/json\/encoder.py\", line 438, in _iterencode\r\n o = _default(o)\r\n File \"\/usr\/lib\/python3.7\/json\/encoder.py\", line 179, in default\r\n raise TypeError(f'Object of type {o.__class__.__name__} '\r\nTypeError: Object of type ReadInstruction is not JSON serializable\r\n```\r\n\r\nLet me know if there is some misuse from my end.\r\n\r\nThanks in advance.\r\n ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2139\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2139\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2138","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2138\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2138\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2138\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2138","id":843508402,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAyODc4NzU2","number":2138,"title":"Add CER metric","user":{"login":"chutaklee","id":6931004,"node_id":"MDQ6VXNlcjY5MzEwMDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6931004?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chutaklee","html_url":"https:\/\/github.com\/chutaklee","followers_url":"https:\/\/api.github.com\/users\/chutaklee\/followers","following_url":"https:\/\/api.github.com\/users\/chutaklee\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chutaklee\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chutaklee\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chutaklee\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chutaklee\/orgs","repos_url":"https:\/\/api.github.com\/users\/chutaklee\/repos","events_url":"https:\/\/api.github.com\/users\/chutaklee\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chutaklee\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-29T15:52:27Z","updated_at":"2021-04-06T16:16:11Z","closed_at":"2021-04-06T07:14:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2138","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2138","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2138.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2138.patch","merged_at":"2021-04-06T07:14:38Z"},"body":"Add Character Error Rate (CER) metric that is used in evaluation in ASR. 
I also have written unittests (hopefully thorough enough) but I'm not sure how to integrate them into the existed codebase.\r\n\r\n```python\r\nfrom cer import CER\r\n\r\ncer = CER()\r\n\r\nclass TestCER(unittest.TestCase):\r\n def test_cer_case_senstive(self):\r\n refs = ['White House']\r\n preds = ['white house']\r\n # S = 2, D = 0, I = 0, N = 11, CER = 2 \/ 11\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.1818181818) < 1e-6)\r\n\r\n def test_cer_whitespace(self):\r\n refs = ['were wolf']\r\n preds = ['werewolf']\r\n # S = 0, D = 0, I = 1, N = 9, CER = 1 \/ 9\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.1111111) < 1e-6)\r\n\r\n refs = ['werewolf']\r\n preds = ['weae wolf']\r\n # S = 1, D = 1, I = 0, N = 8, CER = 0.25\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.25) < 1e-6)\r\n\r\n # consecutive whitespaces case 1\r\n refs = ['were wolf']\r\n preds = ['were wolf']\r\n # S = 0, D = 0, I = 0, N = 9, CER = 0\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.0) < 1e-6)\r\n\r\n # consecutive whitespaces case 2\r\n refs = ['were wolf']\r\n preds = ['were wolf']\r\n # S = 0, D = 0, I = 0, N = 9, CER = 0\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.0) < 1e-6)\r\n\r\n def test_cer_sub(self):\r\n refs = ['werewolf']\r\n preds = ['weaewolf']\r\n # S = 1, D = 0, I = 0, N = 8, CER = 0.125\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.125) < 1e-6)\r\n\r\n def test_cer_del(self):\r\n refs = ['werewolf']\r\n preds = ['wereawolf']\r\n # S = 0, D = 1, I = 0, N = 8, CER = 0.125\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.125) < 1e-6)\r\n\r\n def test_cer_insert(self):\r\n refs = ['werewolf']\r\n preds = ['wereolf']\r\n # S = 0, D = 0, I = 1, N = 8, CER = 0.125\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.125) < 1e-6)\r\n\r\n def test_cer_equal(self):\r\n refs = ['werewolf']\r\n char_error_rate = cer.compute(predictions=refs, references=refs)\r\n self.assertEqual(char_error_rate, 0.0)\r\n\r\n def test_cer_list_of_seqs(self):\r\n refs = ['werewolf', 'I am your father']\r\n char_error_rate = cer.compute(predictions=refs, references=refs)\r\n self.assertEqual(char_error_rate, 0.0)\r\n\r\n refs = ['werewolf', 'I am your father', 'doge']\r\n preds = ['werxwolf', 'I am your father', 'doge']\r\n # S = 1, D = 0, I = 0, N = 28, CER = 1 \/ 28\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.03571428) < 1e-6)\r\n\r\n def test_cer_unicode(self):\r\n ref = [u'\u6211\u80fd\u541e\u4e0b\u73bb\u7483\u800c\u4e0d\u4f24\u8eab\u4f53']\r\n pred = [u' \u80fd\u541e\u867e\u73bb\u7483\u800c \u4e0d\u971c\u8eab\u4f53\u5566']\r\n # S = 3, D = 2, I = 0, N = 11\r\n # CER = 5 \/ 11\r\n char_error_rate = cer.compute(predictions=pred, references=ref)\r\n self.assertTrue(abs(char_error_rate - 0.4545454545) < 1e-6)\r\n\r\n ref = [u'\u6211\u80fd\u541e', u'\u4e0b\u73bb\u7483\u800c\u4e0d\u4f24\u8eab\u4f53']\r\n pred = [u'\u6211 \u80fd \u541e \u4e0b \u73bb \u7483', u'\u800c\u4e0d\u4f24\u8eab\u4f53']\r\n # S = 0, D = 5, I = 0, N = 11\r\n # CER = 5 
\/ 11\r\n char_error_rate = cer.compute(predictions=pred, references=ref)\r\n self.assertTrue(abs(char_error_rate - 0.454545454545) < 1e-6)\r\n\r\n ref = [u'\u6211\u80fd\u541e\u4e0b\u73bb\u7483\u800c\u4e0d\u4f24\u8eab\u4f53']\r\n char_error_rate = cer.compute(predictions=ref, references=ref)\r\n self.assertFalse(char_error_rate, 0.0)\r\n\r\n def test_cer_empty(self):\r\n ref = ''\r\n pred = 'Hypothesis'\r\n with self.assertRaises(ValueError):\r\n char_error_rate = cer.compute(predictions=pred, references=ref)\r\n\r\nif __name__ == '__main__':\r\n unittest.main()\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2138\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2138\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2137","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2137\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2137\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2137\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2137","id":843502835,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAyODc0MDYw","number":2137,"title":"Fix missing infos from concurrent dataset loading","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-29T15:46:12Z","updated_at":"2021-03-31T10:35:56Z","closed_at":"2021-03-31T10:35:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2137","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2137","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2137.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2137.patch","merged_at":"2021-03-31T10:35:55Z"},"body":"This should fix issue #2131 \r\n\r\nWhen calling `load_dataset` at the same time from 2 workers, one of the worker could have missing split infos when reloading the dataset from the 
cache.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2137\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2137\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2136","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2136\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2136\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2136\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2136","id":843492015,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAyODY0ODY5","number":2136,"title":"fix dialogue action slot name and value","user":{"login":"adamlin120","id":31605305,"node_id":"MDQ6VXNlcjMxNjA1MzA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31605305?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adamlin120","html_url":"https:\/\/github.com\/adamlin120","followers_url":"https:\/\/api.github.com\/users\/adamlin120\/followers","following_url":"https:\/\/api.github.com\/users\/adamlin120\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adamlin120\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adamlin120\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adamlin120\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adamlin120\/orgs","repos_url":"https:\/\/api.github.com\/users\/adamlin120\/repos","events_url":"https:\/\/api.github.com\/users\/adamlin120\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adamlin120\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-29T15:34:13Z","updated_at":"2021-03-31T12:48:02Z","closed_at":"2021-03-31T12:48:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2136","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2136","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2136.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2136.patch","merged_at":"2021-03-31T12:48:01Z"},"body":"fix #2128","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2136\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2136\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2135","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2135\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2135\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2135\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2135","id":843246344,"node_id":"MDU6SXNzdWU4NDMyNDYzNDQ=","number":2135,"title":"en language data 
from MLQA dataset is missing","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-29T10:47:50Z","updated_at":"2021-03-30T10:20:23Z","closed_at":"2021-03-30T10:20:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI need mlqa-translate-train.en dataset, but it is missing from the MLQA dataset. could you have a look please? @lhoestq thank you for your help to fix this issue. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2135\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2135\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2134","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2134\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2134\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2134\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2134","id":843242849,"node_id":"MDU6SXNzdWU4NDMyNDI4NDk=","number":2134,"title":"Saving large in-memory datasets with save_to_disk crashes because of 
pickling","user":{"login":"prokopCerny","id":5815801,"node_id":"MDQ6VXNlcjU4MTU4MDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5815801?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/prokopCerny","html_url":"https:\/\/github.com\/prokopCerny","followers_url":"https:\/\/api.github.com\/users\/prokopCerny\/followers","following_url":"https:\/\/api.github.com\/users\/prokopCerny\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/prokopCerny\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/prokopCerny\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/prokopCerny\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/prokopCerny\/orgs","repos_url":"https:\/\/api.github.com\/users\/prokopCerny\/repos","events_url":"https:\/\/api.github.com\/users\/prokopCerny\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/prokopCerny\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-03-29T10:43:15Z","updated_at":"2021-05-03T17:59:21Z","closed_at":"2021-05-03T17:59:21Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Using Datasets 1.5.0 on Python 3.7.\r\nRecently I've been working on medium to large size datasets 
(pretokenized raw text sizes from few gigabytes to low tens of gigabytes), and have found out that several preprocessing steps are massively faster when done in memory, and I have the ability to requisition a lot of RAM, so I decided to do these steps completely out of the datasets library.\r\n\r\n So my workflow is to do several .map() on datasets object, then for the operation which is faster in memory to extract the necessary columns from the dataset and then drop it whole, do the transformation in memory, and then create a fresh Dataset object using .from_dict() or other method. \r\n\r\nWhen I then try to call save_to_disk(path) on the dataset, it crashes because of pickling, which appears to be because of using old pickle protocol which doesn't support large files (over 4 GiB).\r\n```\r\nTraceback (most recent call last):\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 80, in \r\n main()\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 75, in main\r\n tokenize_and_chunkify(config)\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 60, in tokenize_and_chunkify\r\n contexts_dataset.save_to_disk(chunked_path)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 457, in save_to_disk\r\n self = pickle.loads(pickle.dumps(self))\r\nOverflowError: cannot serialize a bytes object larger than 4 GiB\r\n```\r\nFrom what I've seen this issue may be possibly fixed, as the line `self = pickle.loads(pickle.dumps(self))` does not appear to be present in the current state of the repository.\r\n\r\nTo save these datasets to disk, I've resorted to calling .map() over them with `function=None` and specifying the .arrow cache file, and then creating a new dataset using the .from_file() method, which I can then safely save to disk.\r\n\r\nAdditional issue when working with these large in-memory datasets is when using multiprocessing, is again to do with pickling. I've tried to speed up the mapping with function=None by specifying num_proc to the available cpu count, and I again get issues with transferring the dataset, with the following traceback. 
I am not sure if I should open a separate issue for that.\r\n```\r\nTraceback (most recent call last):\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 94, in \r\n main()\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 89, in main\r\n tokenize_and_chunkify(config)\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 67, in tokenize_and_chunkify\r\n contexts_dataset.map(function=None, cache_file_name=str(output_dir_path \/ \"tmp.arrow\"), writer_batch_size=50000, num_proc=config.threads)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1485, in map\r\n transformed_shards = [r.get() for r in results]\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1485, in \r\n transformed_shards = [r.get() for r in results]\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/pool.py\", line 657, in get\r\n raise self._value\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/pool.py\", line 431, in _handle_tasks\r\n put(task)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/connection.py\", line 209, in send\r\n self._send_bytes(_ForkingPickler.dumps(obj))\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/reduction.py\", line 54, in dumps\r\n cls(buf, protocol, *args, **kwds).dump(obj)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 454, in dump\r\n StockPickler.dump(self, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 437, in dump\r\n self.save(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 941, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 859, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 885, in _batch_setitems\r\n save(v)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 662, in save_reduce\r\n save(state)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 941, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 859, in save_dict\r\n 
self._batch_setitems(obj.items())\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 885, in _batch_setitems\r\n save(v)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 843, in _batch_appends\r\n save(x)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n 
File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 843, in _batch_appends\r\n save(x)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 732, in save_bytes\r\n self._write_large_bytes(BINBYTES + pack(\"\r\n main()\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 89, in main\r\n tokenize_and_chunkify(config)\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 67, in tokenize_and_chunkify\r\n contexts_dataset.map(function=None, cache_file_name=str(output_dir_path \/ \"tmp.arrow\"), writer_batch_size=50000, num_proc=config.threads)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1485, in map\r\n transformed_shards = [r.get() for r in results]\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1485, in \r\n transformed_shards = [r.get() for r in results]\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/pool.py\", line 657, in get\r\n raise self._value\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/pool.py\", line 431, in _handle_tasks\r\n put(task)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/connection.py\", line 
209, in send\r\n self._send_bytes(_ForkingPickler.dumps(obj))\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/reduction.py\", line 54, in dumps\r\n cls(buf, protocol, *args, **kwds).dump(obj)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 454, in dump\r\n StockPickler.dump(self, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 437, in dump\r\n self.save(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 941, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 859, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 885, in _batch_setitems\r\n save(v)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 662, in save_reduce\r\n save(state)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 941, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 859, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 885, in _batch_setitems\r\n save(v)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 843, in _batch_appends\r\n save(x)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File 
\"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n 
self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 843, in _batch_appends\r\n save(x)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 732, in save_bytes\r\n self._write_large_bytes(BINBYTES + pack(\"\r\n73 | \u00a0 | main()\r\n74 | \u00a0 | File \"run_gpt.py\", line 222, in main\r\n75 | \u00a0 | delimiter=\"\\t\", column_names=[\"input_ids\", \"attention_mask\", \"chinese_ref\"])\r\n76 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 747, in load_dataset\r\n77 | \u00a0 | use_auth_token=use_auth_token,\r\n78 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 513, in download_and_prepare\r\n79 | \u00a0 | self.download_post_processing_resources(dl_manager)\r\n80 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 673, in download_post_processing_resources\r\n81 | \u00a0 | for split in self.info.splits:\r\n82 | \u00a0 | TypeError: 'NoneType' object is not iterable\r\n83 | \u00a0 | WARNING:datasets.builder:Reusing dataset csv (\/usr\/local\/app\/.cache\/huggingface\/datasets\/csv\/default-1c257ebd48e225e7\/0.0.0\/2960f95a26e85d40ca41a230ac88787f715ee3003edaacb8b1f0891e9f04dda2)\r\n84 | \u00a0 | Traceback (most recent call last):\r\n85 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/runpy.py\", line 193, in _run_module_as_main\r\n86 | \u00a0 | \"__main__\", mod_spec)\r\n87 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/runpy.py\", line 85, in _run_code\r\n88 | \u00a0 | exec(code, run_globals)\r\n89 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/torch\/distributed\/launch.py\", line 340, in \r\n90 | \u00a0 | main()\r\n91 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/torch\/distributed\/launch.py\", line 326, in main\r\n92 | \u00a0 | sigkill_handler(signal.SIGTERM, None) # not coming back\r\n93 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/torch\/distributed\/launch.py\", line 301, in sigkill_handler\r\n94 | \u00a0 | raise subprocess.CalledProcessError(returncode=last_return_code, cmd=cmd)\r\n\r\n```\r\nOn worker 1 it loads the dataset well, however on worker 2 will get this error. 
\r\nAnd I will meet this error from time to time, sometimes it just goes well.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2131\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":1,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2131\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2130","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2130\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2130\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2130\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2130","id":843111936,"node_id":"MDU6SXNzdWU4NDMxMTE5MzY=","number":2130,"title":"wikiann dataset is missing columns ","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-03-29T08:23:00Z","updated_at":"2021-08-27T14:44:18Z","closed_at":"2021-08-27T14:44:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nWikiann dataset needs to have \"spans\" columns, which is necessary to be able to use this dataset, but this column is missing from huggingface datasets, could you please have a look? 
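For illustration only, a rough sketch of how such a spans column could be derived from the existing `tokens` and `ner_tags` columns with `.map()`. The IOB2 label scheme and the "TYPE: text" span format are assumptions, not the dataset's defined schema.

```python
# Rough sketch (assumptions: IOB2 tags and a "TYPE: text" span format) of
# building a "spans" column from the tokens/ner_tags columns of wikiann.
from datasets import load_dataset

wikiann = load_dataset("wikiann", "en")
label_names = wikiann["train"].features["ner_tags"].feature.names  # e.g. "B-PER", "I-PER", "O"

def add_spans(example):
    spans, tokens, ent_type = [], [], None
    for token, tag_id in zip(example["tokens"], example["ner_tags"]):
        tag = label_names[tag_id]
        if tag == "O" or tag.startswith("B-"):
            if tokens:                                   # close the previous entity
                spans.append(f"{ent_type}: {' '.join(tokens)}")
            tokens, ent_type = ([token], tag[2:]) if tag != "O" else ([], None)
        else:                                            # "I-XXX" continues an entity
            if not tokens:                               # stray I- tag starts a new one
                ent_type = tag[2:]
            tokens.append(token)
    if tokens:
        spans.append(f"{ent_type}: {' '.join(tokens)}")
    example["spans"] = spans
    return example

wikiann = wikiann.map(add_spans)
```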
thank you @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2130\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2130\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2129","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2129\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2129\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2129\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2129","id":843033656,"node_id":"MDU6SXNzdWU4NDMwMzM2NTY=","number":2129,"title":"How to train BERT model with next sentence prediction?","user":{"login":"jnishi","id":836541,"node_id":"MDQ6VXNlcjgzNjU0MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/836541?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jnishi","html_url":"https:\/\/github.com\/jnishi","followers_url":"https:\/\/api.github.com\/users\/jnishi\/followers","following_url":"https:\/\/api.github.com\/users\/jnishi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jnishi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jnishi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jnishi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jnishi\/orgs","repos_url":"https:\/\/api.github.com\/users\/jnishi\/repos","events_url":"https:\/\/api.github.com\/users\/jnishi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jnishi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-29T06:48:03Z","updated_at":"2021-04-01T04:58:40Z","closed_at":"2021-04-01T04:58:40Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello.\r\n\r\nI'm trying to pretrain the BERT model with next sentence prediction. 
Is there any function that supports next sentence prediction \r\nlike ` TextDatasetForNextSentencePrediction` of `huggingface\/transformers` ?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2129\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2129\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2128","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2128\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2128\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2128\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2128","id":843023910,"node_id":"MDU6SXNzdWU4NDMwMjM5MTA=","number":2128,"title":"Dialogue action slot name and value are reversed in MultiWoZ 2.2","user":{"login":"adamlin120","id":31605305,"node_id":"MDQ6VXNlcjMxNjA1MzA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31605305?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adamlin120","html_url":"https:\/\/github.com\/adamlin120","followers_url":"https:\/\/api.github.com\/users\/adamlin120\/followers","following_url":"https:\/\/api.github.com\/users\/adamlin120\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adamlin120\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adamlin120\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adamlin120\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adamlin120\/orgs","repos_url":"https:\/\/api.github.com\/users\/adamlin120\/repos","events_url":"https:\/\/api.github.com\/users\/adamlin120\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adamlin120\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-29T06:34:02Z","updated_at":"2021-03-31T12:48:01Z","closed_at":"2021-03-31T12:48:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi @yjernite, thank you for adding MultiWoZ 2.2 in the huggingface datasets platform. 
It is beneficial!\r\n\r\nI spotted an error: the order of the Dialogue action slot names and values is reversed.\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/649b2c469779bc4221e1b6969aa2496d63eb5953\/datasets\/multi_woz_v22\/multi_woz_v22.py#L251-L262","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2128\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2128\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2127","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2127\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2127\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2127\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2127","id":843017199,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAyNDYxMzc3","number":2127,"title":"make documentation more clear to use different cloud storage","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-29T06:24:06Z","updated_at":"2021-03-29T12:16:24Z","closed_at":"2021-03-29T12:16:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2127","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2127","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2127.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2127.patch","merged_at":"2021-03-29T12:16:24Z"},"body":"This PR extends the cloud storage documentation to show that you can use a different `fsspec` implementation. 
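For illustration, a minimal sketch of the pattern this documentation change is about: passing a non-S3 `fsspec` filesystem (here `gcsfs`) to `save_to_disk` and `load_from_disk`. The bucket and project names are placeholders, and the `fs` argument is assumed to accept any fsspec-compatible filesystem.

```python
# Minimal sketch (placeholder bucket/project names): any fsspec-compatible
# filesystem should work where the docs show S3, e.g. Google Cloud Storage.
import gcsfs
from datasets import load_dataset, load_from_disk

fs = gcsfs.GCSFileSystem(project="my-gcp-project")        # hypothetical project
ds = load_dataset("imdb", split="train")

ds.save_to_disk("gcs://my-bucket/imdb-train", fs=fs)       # write to GCS
reloaded = load_from_disk("gcs://my-bucket/imdb-train", fs=fs)
```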
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2127\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2127\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2126","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2126\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2126\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2126\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2126","id":842779966,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAyMjcyMjg4","number":2126,"title":"Replace legacy torch.Tensor constructor with torch.tensor","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-28T16:57:30Z","updated_at":"2021-03-29T09:27:14Z","closed_at":"2021-03-29T09:27:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2126","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2126","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2126.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2126.patch","merged_at":"2021-03-29T09:27:13Z"},"body":"The title says it all (motivated by [this issue](https:\/\/github.com\/pytorch\/pytorch\/issues\/53146) in the pytorch repo).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2126\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2126\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2125","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2125\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2125\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2125\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2125","id":842690570,"node_id":"MDU6SXNzdWU4NDI2OTA1NzA=","number":2125,"title":"Is dataset timit_asr broken?","user":{"login":"kosuke-kitahara","id":42398050,"node_id":"MDQ6VXNlcjQyMzk4MDUw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42398050?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kosuke-kitahara","html_url":"https:\/\/github.com\/kosuke-kitahara","followers_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/followers","following_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/orgs","repos_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/repos","events_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-28T08:30:18Z","updated_at":"2021-03-28T12:29:25Z","closed_at":"2021-03-28T12:29:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Using `timit_asr` dataset, I saw all records are the same.\r\n\r\n``` python\r\nfrom datasets import load_dataset, load_metric\r\n\r\ntimit = load_dataset(\"timit_asr\")\r\n\r\nfrom datasets import ClassLabel\r\nimport random\r\nimport pandas as pd\r\nfrom IPython.display import display, HTML\r\n\r\ndef show_random_elements(dataset, num_examples=10):\r\n assert num_examples <= len(dataset), \"Can't pick more elements than there are in the dataset.\"\r\n picks = []\r\n for _ in range(num_examples):\r\n pick = random.randint(0, len(dataset)-1)\r\n while pick in picks:\r\n pick = random.randint(0, len(dataset)-1)\r\n picks.append(pick)\r\n\r\n df = pd.DataFrame(dataset[picks])\r\n display(HTML(df.to_html()))\r\n\r\n\r\nshow_random_elements(timit['train'].remove_columns([\"file\", \"phonetic_detail\", \"word_detail\", \"dialect_region\", \"id\", \r\n \"sentence_type\", \"speaker_id\"]), num_examples=20)\r\n\r\n```\r\n\r\n`output`\r\n\r\n\"Screen\r\n\r\n\r\nI double-checked it [here](https:\/\/huggingface.co\/datasets\/viewer\/), and met the same problem.\r\n\r\n\"Screen\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2125\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2125\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2124","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2124\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2124\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2124\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2124","id":842627729,"node_id":"MDU6SXNzdWU4NDI2Mjc3Mjk=","number":2124,"title":"Adding ScaNN library to do MIPS?","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-28T00:07:00Z","updated_at":"2021-03-29T13:23:43Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"@lhoestq Hi I am thinking of adding this new google library to do the MIPS similar to **add_faiss_idex**. As the paper suggests, it is really fast when it comes to retrieving the nearest neighbors. 
\r\n\r\nhttps:\/\/github.com\/google-research\/google-research\/tree\/master\/scann\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/16892570\/112738294-78ec9800-8fc6-11eb-9a5f-3d7ee5818e76.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2124\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2124\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2123","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2123\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2123\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2123\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2123","id":842577285,"node_id":"MDU6SXNzdWU4NDI1NzcyODU=","number":2123,"title":"Problem downloading GEM wiki_auto_asset_turk dataset","user":{"login":"mille-s","id":29705940,"node_id":"MDQ6VXNlcjI5NzA1OTQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29705940?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mille-s","html_url":"https:\/\/github.com\/mille-s","followers_url":"https:\/\/api.github.com\/users\/mille-s\/followers","following_url":"https:\/\/api.github.com\/users\/mille-s\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mille-s\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mille-s\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mille-s\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mille-s\/orgs","repos_url":"https:\/\/api.github.com\/users\/mille-s\/repos","events_url":"https:\/\/api.github.com\/users\/mille-s\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mille-s\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-03-27T18:41:28Z","updated_at":"2021-05-12T16:15:18Z","closed_at":"2021-05-12T16:15:17Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"@yjernite \r\n\r\n### Summary\r\n\r\nI am currently working on the GEM datasets and do not manage to download the wiki_auto_asset_turk data, whereas all other datasets download well with the same code.\r\n\r\n### Steps to reproduce\r\nCode snippet:\r\n\r\nfrom datasets import load_dataset\r\n#dataset = load_dataset('gem', 'web_nlg_en')\r\ndataset = load_dataset('gem', 'wiki_auto_asset_turk')\r\n\r\n```\r\n\r\n**Expected behavior:**\r\n\r\nI expect the dataset to start downloading (download bar appears and progresses toward 100%)\r\n\r\n**Actual behavior:**\r\nInstead of seeing the download bar appearing, nothing happens; the following appears in the console as expected, but nothing more:\r\n\r\nDownloading: 36.6kB [00:00, 37.2MB\/s]\r\nDownloading: 41.7kB [00:00, ?B\/s]\r\nDownloading and preparing dataset gem\/wiki_auto_asset_turk (download: 121.37 MiB, generated: 145.69 MiB, post-processed: Unknown size, total: 267.07 MiB) to C:\\Users\\sfmil\\.cache\\huggingface\\datasets\\gem\\wiki_auto_asset_turk\\1.0.0\\f252756d7f1b8f019aac71a1623b2950acfe10d25d956668ac4eae4e93c58b8d...\r\n\r\n### Is 
this a regression?\r\nNo, it was the first time I was trying to download this dataset (same for the other ones).\r\n\r\n### Debug info\r\n- Python version: Python 3.8.2\r\n- OS version: Windows 10 Family","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2123\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2123\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2122","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2122\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2122\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2122\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2122","id":842194588,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAxODE3MjI0","number":2122,"title":"Fast table queries with interpolation search","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-26T18:09:20Z","updated_at":"2021-08-04T18:11:59Z","closed_at":"2021-04-06T14:33:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2122","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2122","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2122.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2122.patch","merged_at":"2021-04-06T14:33:01Z"},"body":"## Intro\r\n\r\nThis should fix issue #1803 \r\n\r\nCurrently querying examples in a dataset is O(n) because of the underlying pyarrow ChunkedArrays implementation.\r\nTo fix this I implemented interpolation search that is pretty effective since datasets usually verifies the condition of evenly distributed chunks (the default chunk size is fixed).\r\n\r\n## Benchmark\r\n\r\nHere is a [benchmark](https:\/\/pastebin.com\/utEXUqsR) I did on bookcorpus (74M rows):\r\n\r\nfor the current implementation\r\n```python\r\n>>> python speed.py\r\nLoaded dataset 'bookcorpus', len=74004228, nbytes=4835358766\r\n\r\n\r\n========================= Querying unshuffled bookcorpus =========================\r\n\r\nAvg access time key=1 : 0.018ms\r\nAvg access time key=74004227 : 0.215ms\r\nAvg 
access time key=range(74003204, 74004228) : 1.416ms\r\nAvg access time key=RandIter(low=0, high=74004228, size=1024, seed=42): 92.532ms\r\n\r\n========================== Querying shuffled bookcorpus ==========================\r\n\r\nAvg access time key=1 : 0.187ms\r\nAvg access time key=74004227 : 6.642ms\r\nAvg access time key=range(74003204, 74004228) : 90.941ms\r\nAvg access time key=RandIter(low=0, high=74004228, size=1024, seed=42): 3448.456ms\r\n```\r\n\r\nfor the new one using interpolation search:\r\n```python\r\n>>> python speed.py\r\nLoaded dataset 'bookcorpus', len=74004228, nbytes=4835358766\r\n\r\n\r\n========================= Querying unshuffled bookcorpus =========================\r\n\r\nAvg access time key=1 : 0.076ms\r\nAvg access time key=74004227 : 0.056ms\r\nAvg access time key=range(74003204, 74004228) : 1.807ms\r\nAvg access time key=RandIter(low=0, high=74004228, size=1024, seed=42): 24.028ms\r\n\r\n========================== Querying shuffled bookcorpus ==========================\r\n\r\nAvg access time key=1 : 0.061ms\r\nAvg access time key=74004227 : 0.058ms\r\nAvg access time key=range(74003204, 74004228) : 22.166ms\r\nAvg access time key=RandIter(low=0, high=74004228, size=1024, seed=42): 42.757ms\r\n```\r\n\r\nThe RandIter class is just an iterable of 1024 random indices from 0 to 74004228.\r\n\r\nHere is also a plot showing the speed improvement depending on the dataset size:\r\n![image](https:\/\/user-images.githubusercontent.com\/42851186\/112673587-32335c80-8e65-11eb-9a0c-58ad774abaec.png)\r\n\r\n## Implementation details:\r\n- `datasets.table.Table` objects implement interpolation search for the `slice` method\r\n- The interpolation search requires to store the offsets of all the chunks of a table. The offsets are stored when the `Table` is initialized.\r\n- `datasets.table.Table.slice` returns a `datasets.table.Table` using interpolation search\r\n- `datasets.table.Table.fast_slice` returns a `pyarrow.Table` object using interpolation search. This is useful to get a part of a dataset if we don't need the indexing structure for future computations. For example it's used when querying an example as a dictionary.\r\n- Now a `Dataset` object is always backed by a `datasets.table.Table` object. 
If one passes a `pyarrow.Table` to initialize a `Dataset`, then it's converted to a `datasets.table.Table`\r\n\r\n## Checklist:\r\n\r\n- [x] implement interpolation search\r\n- [x] use `datasets.table.Table` in `Dataset` objects\r\n- [x] update current tests\r\n- [x] add tests for interpolation search\r\n- [x] comments and docstring\r\n- [x] add the benchmark to the CI\r\n\r\nFix #1803.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2122\/reactions","total_count":4,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":4,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2122\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2121","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2121\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2121\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2121\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2121","id":842148633,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAxNzc4NDc4","number":2121,"title":"Add Validation For README","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-26T17:02:17Z","updated_at":"2021-05-10T13:17:18Z","closed_at":"2021-05-10T09:41:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2121","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2121","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2121.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2121.patch","merged_at":"2021-05-10T09:41:41Z"},"body":"Hi @lhoestq, @yjernite \r\n\r\nThis is a simple Readme parser. 
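As an aside on the interpolation-search PR (#2122) above: a rough, self-contained sketch of the idea of locating the chunk that contains a given row index from the cumulative chunk offsets. It is an illustration of the technique under stated assumptions, not the PR's actual implementation.

```python
# Rough sketch of interpolation search over cumulative chunk offsets:
# offsets[k] is the first row index of chunk k, offsets[-1] is the total length.
# When chunks are roughly evenly sized, the first interpolated guess is usually
# right, so the lookup is close to O(1) instead of scanning all chunks.
def find_chunk(offsets, i):
    lo, hi = 0, len(offsets) - 2                      # hi = index of the last chunk
    while lo <= hi:
        span = offsets[hi + 1] - offsets[lo]
        pos = lo + (i - offsets[lo]) * (hi - lo) // max(span, 1)
        pos = min(max(pos, lo), hi)                   # clamp the interpolated guess
        if offsets[pos] <= i < offsets[pos + 1]:
            return pos
        if i < offsets[pos]:
            hi = pos - 1
        else:
            lo = pos + 1
    raise IndexError(f"row {i} is out of range")

offsets = [0, 1000, 2000, 3000, 3500]                 # 4 chunks, 3500 rows total
assert find_chunk(offsets, 2999) == 2
assert find_chunk(offsets, 3499) == 3
```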
All classes specific to different sections can inherit `Section` class, and we can define more attributes in each.\r\n\r\nLet me know if this is going in the right direction :)\r\n\r\nCurrently the output looks like this, for `to_dict()` on `FashionMNIST` `README.md`:\r\n\r\n```json\r\n{\r\n \"name\": \".\/datasets\/fashion_mnist\/README.md\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Dataset Card for FashionMNIST\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Table of Contents\",\r\n \"attributes\": \"- [Dataset Description](#dataset-description)\\n - [Dataset Summary](#dataset-summary)\\n - [Supported Tasks](#supported-tasks-and-leaderboards)\\n - [Languages](#languages)\\n- [Dataset Structure](#dataset-structure)\\n - [Data Instances](#data-instances)\\n - [Data Fields](#data-instances)\\n - [Data Splits](#data-instances)\\n- [Dataset Creation](#dataset-creation)\\n - [Curation Rationale](#curation-rationale)\\n - [Source Data](#source-data)\\n - [Annotations](#annotations)\\n - [Personal and Sensitive Information](#personal-and-sensitive-information)\\n- [Considerations for Using the Data](#considerations-for-using-the-data)\\n - [Social Impact of Dataset](#social-impact-of-dataset)\\n - [Discussion of Biases](#discussion-of-biases)\\n - [Other Known Limitations](#other-known-limitations)\\n- [Additional Information](#additional-information)\\n - [Dataset Curators](#dataset-curators)\\n - [Licensing Information](#licensing-information)\\n - [Citation Information](#citation-information)\\n - [Contributions](#contributions)\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Dataset Description\",\r\n \"attributes\": \"- **Homepage:** [GitHub](https:\/\/github.com\/zalandoresearch\/fashion-mnist)\\n- **Repository:** [GitHub](https:\/\/github.com\/zalandoresearch\/fashion-mnist)\\n- **Paper:** [arXiv](https:\/\/arxiv.org\/pdf\/1708.07747.pdf)\\n- **Leaderboard:**\\n- **Point of Contact:**\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Dataset Summary\",\r\n \"attributes\": \"Fashion-MNIST is a dataset of Zalando's article images\\u2014consisting of a training set of 60,000 examples and a test set of 10,000 examples. Each example is a 28x28 grayscale image, associated with a label from 10 classes. We intend Fashion-MNIST to serve as a direct drop-in replacement for the original MNIST dataset for benchmarking machine learning algorithms. 
It shares the same image size and structure of training and testing splits.\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Supported Tasks and Leaderboards\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Languages\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Dataset Structure\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Data Instances\",\r\n \"attributes\": \"A data point comprises an image and its label.\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Data Fields\",\r\n \"attributes\": \"- `image`: a 2d array of integers representing the 28x28 image.\\n- `label`: an integer between 0 and 9 representing the classes with the following mapping:\\n | Label | Description |\\n | --- | --- |\\n | 0 | T-shirt\/top |\\n | 1 | Trouser |\\n | 2 | Pullover |\\n | 3 | Dress |\\n | 4 | Coat |\\n | 5 | Sandal |\\n | 6 | Shirt |\\n | 7 | Sneaker |\\n | 8 | Bag |\\n | 9 | Ankle boot |\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Data Splits\",\r\n \"attributes\": \"The data is split into training and test set. The training set contains 60,000 images and the test set 10,000 images.\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Dataset Creation\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Curation Rationale\",\r\n \"attributes\": \"**From the arXiv paper:**\\nThe original MNIST dataset contains a lot of handwritten digits. Members of the AI\/ML\/Data Science community love this dataset and use it as a benchmark to validate their algorithms. In fact, MNIST is often the first dataset researchers try. \\\"If it doesn't work on MNIST, it won't work at all\\\", they said. \\\"Well, if it does work on MNIST, it may still fail on others.\\\"\\nHere are some good reasons:\\n- MNIST is too easy. Convolutional nets can achieve 99.7% on MNIST. Classic machine learning algorithms can also achieve 97% easily. Check out our side-by-side benchmark for Fashion-MNIST vs. MNIST, and read \\\"Most pairs of MNIST digits can be distinguished pretty well by just one pixel.\\\"\\n- MNIST is overused. In this April 2017 Twitter thread, Google Brain research scientist and deep learning expert Ian Goodfellow calls for people to move away from MNIST.\\n- MNIST can not represent modern CV tasks, as noted in this April 2017 Twitter thread, deep learning expert\/Keras author Fran\\u00e7ois Chollet.\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Source Data\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Initial Data Collection and Normalization\",\r\n \"attributes\": \"**From the arXiv paper:**\\nFashion-MNIST is based on the assortment on Zalando\\u2019s website. Every fashion product on Zalando has a set of pictures shot by professional photographers, demonstrating different aspects of the product, i.e. front and back looks, details, looks with model and in an outfit. The original picture has a light-gray background (hexadecimal color: #fdfdfd) and stored in 762 \\u00d7 1000 JPEG format. For efficiently serving different frontend components, the original picture is resampled with multiple resolutions, e.g. large, medium, small, thumbnail and tiny.\\nWe use the front look thumbnail images of 70,000 unique products to build Fashion-MNIST. Those products come from different gender groups: men, women, kids and neutral. 
In particular, whitecolor products are not included in the dataset as they have low contrast to the background. The thumbnails (51 \\u00d7 73) are then fed into the following conversion pipeline:\\n1. Converting the input to a PNG image.\\n2. Trimming any edges that are close to the color of the corner pixels. The \\u201ccloseness\\u201d is defined by the distance within 5% of the maximum possible intensity in RGB space.\\n3. Resizing the longest edge of the image to 28 by subsampling the pixels, i.e. some rows and columns are skipped over.\\n4. Sharpening pixels using a Gaussian operator of the radius and standard deviation of 1.0, with increasing effect near outlines.\\n5. Extending the shortest edge to 28 and put the image to the center of the canvas.\\n6. Negating the intensities of the image.\\n7. Converting the image to 8-bit grayscale pixels.\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Who are the source image producers?\",\r\n \"attributes\": \"**From the arXiv paper:**\\nEvery fashion product on Zalando has a set of pictures shot by professional photographers, demonstrating different aspects of the product, i.e. front and back looks, details, looks with model and in an outfit.\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Annotations\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Annotation process\",\r\n \"attributes\": \"**From the arXiv paper:**\\nFor the class labels, they use the silhouette code of the product. The silhouette code is manually labeled by the in-house fashion experts and reviewed by a separate team at Zalando. Each product Zalando is the Europe\\u2019s largest online fashion platform. Each product contains only one silhouette code.\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Who are the annotators?\",\r\n \"attributes\": \"**From the arXiv paper:**\\nThe silhouette code is manually labeled by the in-house fashion experts and reviewed by a separate team at Zalando.\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Personal and Sensitive Information\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Considerations for Using the Data\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Social Impact of Dataset\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Discussion of Biases\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Other Known Limitations\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Additional Information\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Dataset Curators\",\r\n \"attributes\": \"Han Xiao and Kashif Rasul and Roland Vollgraf\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Licensing Information\",\r\n \"attributes\": \"MIT Licence\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Citation Information\",\r\n \"attributes\": \"@article{DBLP:journals\/corr\/abs-1708-07747,\\n author = {Han Xiao and\\n Kashif Rasul and\\n Roland Vollgraf},\\n title = {Fashion-MNIST: a Novel Image Dataset for Benchmarking Machine Learning\\n Algorithms},\\n journal = {CoRR},\\n volume = {abs\/1708.07747},\\n year = {2017},\\n url = {http:\/\/arxiv.org\/abs\/1708.07747},\\n archivePrefix = {arXiv},\\n eprint = {1708.07747},\\n timestamp = {Mon, 13 Aug 2018 
16:47:27 +0200},\\n biburl = {https:\/\/dblp.org\/rec\/bib\/journals\/corr\/abs-1708-07747},\\n bibsource = {dblp computer science bibliography, https:\/\/dblp.org}\\n}\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Contributions\",\r\n \"attributes\": \"Thanks to [@gchhablani](https:\/\/github.com\/gchablani) for adding this dataset.\",\r\n \"subsections\": []\r\n }\r\n ]\r\n }\r\n ]\r\n }\r\n ]\r\n}\r\n```\r\n\r\nThanks,\r\nGunjan","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2121\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2121\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2120","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2120\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2120\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2120\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2120","id":841954521,"node_id":"MDU6SXNzdWU4NDE5NTQ1MjE=","number":2120,"title":"dataset viewer does not work anymore ","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-26T13:22:13Z","updated_at":"2021-03-26T15:52:22Z","closed_at":"2021-03-26T15:52:22Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI normally use this link to see all datasets and how I can load them \r\n\r\n\r\nhttps:\/\/huggingface.co\/datasets\/viewer\/\r\n\r\nNow I am getting \r\n\r\n502 Bad Gateway\r\nnginx\/1.18.0 (Ubuntu)\r\n\r\ncould you bring this webpage back ? 
this was very helpful @lhoestq \r\nthanks for your help ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2120\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2120\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2119","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2119\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2119\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2119\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2119","id":841567199,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAxMjg2MjIy","number":2119,"title":"copy.deepcopy os.environ instead of copy","user":{"login":"NihalHarish","id":5506053,"node_id":"MDQ6VXNlcjU1MDYwNTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5506053?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NihalHarish","html_url":"https:\/\/github.com\/NihalHarish","followers_url":"https:\/\/api.github.com\/users\/NihalHarish\/followers","following_url":"https:\/\/api.github.com\/users\/NihalHarish\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NihalHarish\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NihalHarish\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NihalHarish\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NihalHarish\/orgs","repos_url":"https:\/\/api.github.com\/users\/NihalHarish\/repos","events_url":"https:\/\/api.github.com\/users\/NihalHarish\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NihalHarish\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-26T03:58:38Z","updated_at":"2021-03-26T15:13:52Z","closed_at":"2021-03-26T15:13:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2119","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2119","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2119.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2119.patch","merged_at":"2021-03-26T15:13:52Z"},"body":"Fixes: https:\/\/github.com\/huggingface\/datasets\/issues\/2115\r\n\r\n- bug fix: using envrion.copy() returns a dict.\r\n- using deepcopy(environ) returns an `_environ` object\r\n- Changing the datatype of the _environ object can break code, if subsequent libraries perform operations using apis exclusive to the environ object, like `environ.getenv()` for example.\r\n\r\n\r\nTesting:\r\n\r\nTested the change on my terminal:\r\n\r\n```\r\n>>> import os\r\n>>> x = deepcopy(os.environ)\r\n>>> y = os.environ\r\n>>> x is y\r\nFalse\r\n>>> isinstance(x, type(os.environ))\r\nTrue\r\n>>> z = os.environ.copy()\r\n>>> isinstance(z, type(os.environ))\r\nFalse\r\n>>> isinstance(z, 
dict)\r\nTrue\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2119\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2119\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2118","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2118\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2118\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2118\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2118","id":841563329,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAxMjgzMDUx","number":2118,"title":"Remove os.environ.copy in Dataset.map","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-26T03:48:17Z","updated_at":"2021-03-26T12:03:23Z","closed_at":"2021-03-26T12:00:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2118","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2118","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2118.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2118.patch","merged_at":null},"body":"Replace `os.environ.copy` with in-place modification\r\nFixes #2115 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2118\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2118\/timeline","performed_via_github_app":null} 
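A minimal sketch of why the `dict` vs `os._Environ` distinction discussed in #2119 and #2118 above matters, assuming CPython, where `dict.get()` accepts only positional arguments while `os._Environ.get()` (inherited from `MutableMapping`) accepts `default=` as a keyword:

```python
# Hedged sketch, not part of #2118/#2119: copy.deepcopy(os.environ) keeps the
# os._Environ type, while os.environ.copy() degrades it to a plain dict, which
# breaks callers that pass `default=` as a keyword argument (CPython behavior).
import copy
import os

env_deep = copy.deepcopy(os.environ)  # still an os._Environ
env_dict = os.environ.copy()          # plain dict

print(type(env_deep).__name__)  # _Environ
print(type(env_dict).__name__)  # dict

# Works on os.environ and on the deepcopy (MutableMapping.get takes a keyword):
env_deep.get("SOME_VARIABLE", default=None)

# Raises TypeError on the plain dict: get() takes no keyword arguments
try:
    env_dict.get("SOME_VARIABLE", default=None)
except TypeError as err:
    print("plain dict copy:", err)
```

This matches the terminal test quoted in #2119; #2118 takes the alternative route of modifying `os.environ` in place rather than rebinding it.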
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2117","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2117\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2117\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2117\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2117","id":841535283,"node_id":"MDU6SXNzdWU4NDE1MzUyODM=","number":2117,"title":"load_metric from local \"glue.py\" meet error 'NoneType' object is not callable","user":{"login":"Frankie123421","id":54012361,"node_id":"MDQ6VXNlcjU0MDEyMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/54012361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Frankie123421","html_url":"https:\/\/github.com\/Frankie123421","followers_url":"https:\/\/api.github.com\/users\/Frankie123421\/followers","following_url":"https:\/\/api.github.com\/users\/Frankie123421\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Frankie123421\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Frankie123421\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Frankie123421\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Frankie123421\/orgs","repos_url":"https:\/\/api.github.com\/users\/Frankie123421\/repos","events_url":"https:\/\/api.github.com\/users\/Frankie123421\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Frankie123421\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-26T02:35:22Z","updated_at":"2021-08-25T21:44:05Z","closed_at":"2021-03-26T02:40:26Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"actual_task = \"mnli\" if task == \"mnli-mm\" else task\r\ndataset = load_dataset(path='\/home\/glue.py', name=actual_task)\r\nmetric = load_metric(path='\/home\/glue.py', name=actual_task)\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n 1 actual_task = \"mnli\" if task == \"mnli-mm\" else task\r\n 2 dataset = load_dataset(path='\/home\/jcli\/glue.py', name=actual_task)\r\n----> 3 metric = load_metric(path='\/home\/jcli\/glue.py', name=actual_task)\r\n\r\n~\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/load.py in load_metric(path, config_name, process_id, num_process, cache_dir, experiment_id, keep_in_memory, download_config, download_mode, script_version, **metric_init_kwargs)\r\n 508 keep_in_memory=keep_in_memory,\r\n 509 experiment_id=experiment_id,\r\n--> 510 **metric_init_kwargs,\r\n 511 )\r\n 512 \r\n\r\nTypeError: 'NoneType' object is not callable\r\n\r\nPlease help","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2117\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2117\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2116","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2116\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2116\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2116\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2116","id":841481292,"node_id":"MDU6SXNzdWU4NDE0ODEyOTI=","number":2116,"title":"Creating custom dataset results in error while calling the map() function","user":{"login":"GeetDsa","id":13940397,"node_id":"MDQ6VXNlcjEzOTQwMzk3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13940397?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/GeetDsa","html_url":"https:\/\/github.com\/GeetDsa","followers_url":"https:\/\/api.github.com\/users\/GeetDsa\/followers","following_url":"https:\/\/api.github.com\/users\/GeetDsa\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/GeetDsa\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/GeetDsa\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/GeetDsa\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/GeetDsa\/orgs","repos_url":"https:\/\/api.github.com\/users\/GeetDsa\/repos","events_url":"https:\/\/api.github.com\/users\/GeetDsa\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/GeetDsa\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-26T00:37:46Z","updated_at":"2021-03-31T14:30:32Z","closed_at":"2021-03-31T14:30:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"calling `map()` of `datasets` library results into an error while defining a Custom dataset.\r\nReproducible example:\r\n```\r\nimport datasets\r\nclass MyDataset(datasets.Dataset):\r\n\r\n def __init__(self, sentences):\r\n \"Initialization\"\r\n self.samples = sentences\r\n\r\n def __len__(self):\r\n \"Denotes the total number of samples\"\r\n return len(self.samples)\r\n\r\n def __getitem__(self, index):\r\n \"Generates one sample of data\"\r\n # Select sample\r\n # Load data and get label\r\n samples = self.samples[index]\r\n\r\n return samples\r\n\r\ndef preprocess_function_train(examples):\r\n inputs = examples\r\n labels = [example+tokenizer.eos_token for example in examples ]\r\n inputs = tokenizer(inputs, max_length=30, padding=True, truncation=True)\r\n labels = tokenizer(labels, max_length=30, padding=True, truncation=True)\r\n model_inputs = inputs\r\n model_inputs[\"labels\"] = labels[\"input_ids\"]\r\n print(\"about to return\")\r\n return model_inputs\r\n\r\n\r\n##train[\"sentence\"] is dataframe column\r\ntrain_dataset = MyDataset(train['sentence'].values.tolist())\r\ntrain_dataset = train_dataset.map(\r\n preprocess_function,\r\n batched = True,\r\n batch_size=32\r\n )\r\n```\r\n\r\nStack trace of error:\r\n```\r\nTraceback (most recent call last):\r\n File \"dir\/train_generate.py\", line 362, in \r\n main()\r\n File \"dir\/train_generate.py\", line 245, in main\r\n train_dataset = train_dataset.map(\r\n File \"anaconda_dir\/anaconda3\/envs\/env1\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 1244, in map\r\n return self._map_single(\r\n File 
\"anaconda_dir\/anaconda3\/envs\/env1\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 149, in wrapper\r\n unformatted_columns = set(self.column_names) - set(self._format_columns or [])\r\n File \"anaconda_dir\/anaconda3\/envs\/env1\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 526, in column_names\r\n return self._data.column_names\r\nAttributeError: 'MyDataset' object has no attribute '_data'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2116\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2116\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2115","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2115\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2115\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2115\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2115","id":841283974,"node_id":"MDU6SXNzdWU4NDEyODM5NzQ=","number":2115,"title":"The datasets.map() implementation modifies the datatype of os.environ object","user":{"login":"leleamol","id":19983848,"node_id":"MDQ6VXNlcjE5OTgzODQ4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19983848?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/leleamol","html_url":"https:\/\/github.com\/leleamol","followers_url":"https:\/\/api.github.com\/users\/leleamol\/followers","following_url":"https:\/\/api.github.com\/users\/leleamol\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/leleamol\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/leleamol\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/leleamol\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/leleamol\/orgs","repos_url":"https:\/\/api.github.com\/users\/leleamol\/repos","events_url":"https:\/\/api.github.com\/users\/leleamol\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/leleamol\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-25T20:29:19Z","updated_at":"2021-03-26T15:13:52Z","closed_at":"2021-03-26T15:13:52Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In our testing, we noticed that the datasets.map() implementation is modifying the datatype of python os.environ object from '_Environ' to 'dict'.\r\n\r\nThis causes following function calls to fail as follows:\r\n\r\n` \r\n x = os.environ.get(\"TEST_ENV_VARIABLE_AFTER_dataset_map\", default=None)\r\n TypeError: get() takes no keyword arguments\r\n`\r\nIt looks like the following line in datasets.map implementation introduced this functionality.\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/0cb1ac06acb0df44a1cf4128d03a01865faa2504\/src\/datasets\/arrow_dataset.py#L1421\r\n\r\nHere is the test script to reproduce this error. 
\r\n\r\n\r\n```\r\nfrom datasets import load_dataset\r\nfrom transformers import AutoTokenizer\r\nimport os\r\n\r\n\r\ndef test_train():\r\n model_checkpoint = \"distilgpt2\"\r\n datasets = load_dataset('wikitext', 'wikitext-2-raw-v1')\r\n tokenizer = AutoTokenizer.from_pretrained(model_checkpoint, use_fast=True)\r\n tokenizer.pad_token = tokenizer.eos_token\r\n\r\n\r\n def tokenize_function(examples):\r\n y = tokenizer(examples['text'], truncation=True, max_length=64)\r\n return y\r\n\r\n x = os.environ.get(\"TEST_ENV_VARIABLE_BEFORE_dataset_map\", default=None)\r\n print(f\"Testing environment variable: TEST_ENV_VARIABLE_BEFORE_dataset_map {x}\")\r\n print(f\"Data type of os.environ before datasets.map = {os.environ.__class__.__name__}\")\r\n datasets.map(tokenize_function, batched=True, num_proc=2, remove_columns=[\"text\"])\r\n print(f\"Data type of os.environ after datasets.map = {os.environ.__class__.__name__}\")\r\n x = os.environ.get(\"TEST_ENV_VARIABLE_AFTER_dataset_map\", default=None)\r\n print(f\"Testing environment variable: TEST_ENV_VARIABLE_AFTER_dataset_map {x}\")\r\n\r\n\r\nif __name__ == \"__main__\":\r\n test_train()\r\n\r\n\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2115\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2115\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2114","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2114\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2114\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2114\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2114","id":841207878,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwOTc1MTA3","number":2114,"title":"Support for legal NLP datasets (EURLEX, ECtHR cases and 
EU-REG-IR)","user":{"login":"iliaschalkidis","id":1626984,"node_id":"MDQ6VXNlcjE2MjY5ODQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1626984?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iliaschalkidis","html_url":"https:\/\/github.com\/iliaschalkidis","followers_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/followers","following_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/orgs","repos_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/repos","events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-25T18:40:17Z","updated_at":"2021-03-31T10:38:50Z","closed_at":"2021-03-31T10:38:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2114","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2114","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2114.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2114.patch","merged_at":"2021-03-31T10:38:50Z"},"body":"Add support for two legal NLP datasets:\r\n\r\n- EURLEX (https:\/\/www.aclweb.org\/anthology\/P19-1636\/)\r\n- ECtHR cases (https:\/\/arxiv.org\/abs\/2103.13084)\r\n- EU-REG-IR (https:\/\/arxiv.org\/abs\/2101.10726)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2114\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2114\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2113","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2113\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2113\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2113\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2113","id":841191303,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwOTYxMDEz","number":2113,"title":"Implement Dataset as context 
manager","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-25T18:18:30Z","updated_at":"2021-03-31T11:30:14Z","closed_at":"2021-03-31T08:30:11Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2113","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2113","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2113.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2113.patch","merged_at":"2021-03-31T08:30:11Z"},"body":"When used as context manager, it would be safely deleted if some exception is raised.\r\n\r\nThis will avoid \r\n> During handling of the above exception, another exception occurred:","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2113\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2113\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2112","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2112\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2112\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2112\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2112","id":841098008,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwODgyMjA0","number":2112,"title":"Support for legal NLP datasets (EURLEX and ECtHR 
cases)","user":{"login":"iliaschalkidis","id":1626984,"node_id":"MDQ6VXNlcjE2MjY5ODQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1626984?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iliaschalkidis","html_url":"https:\/\/github.com\/iliaschalkidis","followers_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/followers","following_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/orgs","repos_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/repos","events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-25T16:24:17Z","updated_at":"2021-03-25T18:39:31Z","closed_at":"2021-03-25T18:34:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2112","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2112","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2112.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2112.patch","merged_at":null},"body":"Add support for two legal NLP datasets:\r\n- EURLEX (https:\/\/www.aclweb.org\/anthology\/P19-1636\/)\r\n- ECtHR cases (https:\/\/arxiv.org\/abs\/2103.13084)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2112\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2112\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2111","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2111\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2111\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2111\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2111","id":841082087,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwODY4OTg5","number":2111,"title":"Compute WER metric 
iteratively","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-25T16:06:48Z","updated_at":"2021-04-06T07:20:43Z","closed_at":"2021-04-06T07:20:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2111","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2111","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2111.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2111.patch","merged_at":"2021-04-06T07:20:43Z"},"body":"Compute WER metric iteratively to avoid MemoryError.\r\n\r\nFix #2078.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2111\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2111\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2110","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2110\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2110\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2110\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2110","id":840794995,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwNjI1NDQ5","number":2110,"title":"Fix incorrect assertion in 
builder.py","user":{"login":"dreamgonfly","id":2340721,"node_id":"MDQ6VXNlcjIzNDA3MjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2340721?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dreamgonfly","html_url":"https:\/\/github.com\/dreamgonfly","followers_url":"https:\/\/api.github.com\/users\/dreamgonfly\/followers","following_url":"https:\/\/api.github.com\/users\/dreamgonfly\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dreamgonfly\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dreamgonfly\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dreamgonfly\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dreamgonfly\/orgs","repos_url":"https:\/\/api.github.com\/users\/dreamgonfly\/repos","events_url":"https:\/\/api.github.com\/users\/dreamgonfly\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dreamgonfly\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-25T10:39:20Z","updated_at":"2021-04-12T13:33:03Z","closed_at":"2021-04-12T13:33:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2110","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2110","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2110.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2110.patch","merged_at":"2021-04-12T13:33:03Z"},"body":"Fix incorrect num_examples comparison assertion in builder.py","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2110\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2110\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2109","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2109\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2109\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2109\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2109","id":840746598,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwNTg1MzM5","number":2109,"title":"Add more issue templates and customize issue template 
chooser","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-25T09:41:53Z","updated_at":"2021-04-19T06:20:11Z","closed_at":"2021-04-19T06:20:11Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2109","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2109","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2109.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2109.patch","merged_at":"2021-04-19T06:20:10Z"},"body":"When opening an issue, it is not evident for the users how to choose a blank issue template. There is a link at the bottom of all the other issue templates (`Don\u2019t see your issue here? Open a blank issue.`), but this is not very visible for users. 
This is the reason why many users finally chose the `add-dataset` template instead (this is more visible) for issues that indeed are not requesting the addition of a new dataset.\r\n\r\n~~With this PR, the default blank issue template would be as visible as the other templates (as the `add-dataset` template), thus making easier for the users to choose it.~~\r\n\r\nWith this PR:\r\n- more issue templates, besides `add-dataset`, are added: `bug-report` and `feature-request`\r\n- the issue template chooser is customized, so that it now includes a link to `Discussions` for questions","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2109\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2109\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2108","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2108\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2108\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2108\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2108","id":840181055,"node_id":"MDU6SXNzdWU4NDAxODEwNTU=","number":2108,"title":"Is there a way to use a GPU only when training an Index in the process of add_faisis_index?","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-24T21:32:16Z","updated_at":"2021-03-25T06:31:43Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Motivation - Some FAISS indexes like IVF consist of the training step that clusters the dataset into a given number of indexes. 
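One hedged way to do what #2108 asks, outside of `add_faiss_index`'s own `device=` option, is to train the index with faiss-gpu, move it back to CPU, and pass it in as a pre-built index. The `custom_index=` argument used below does exist in `datasets`, but the exact wiring is a sketch under assumptions, not a documented recipe for this issue:

```python
# Hedged sketch for the question above (assumes faiss-gpu is installed):
# train an IVF index on GPU, move it back to CPU, then hand the pre-trained
# index to datasets via the custom_index argument of add_faiss_index.
import faiss
import numpy as np
from datasets import Dataset

d = 768                                                   # embedding dimension
embeddings = np.random.rand(10_000, d).astype("float32")  # placeholder vectors
ds = Dataset.from_dict({"embeddings": embeddings.tolist()})

cpu_index = faiss.index_factory(d, "IVF100,Flat")  # IVF -> needs a training step

# GPU is used only for the clustering/training step, as in the linked gist
res = faiss.StandardGpuResources()
gpu_index = faiss.index_cpu_to_gpu(res, 0, cpu_index)
gpu_index.train(embeddings)
trained_index = faiss.index_gpu_to_cpu(gpu_index)  # back to CPU for add/search

ds.add_faiss_index(column="embeddings", custom_index=trained_index)
scores, retrieved = ds.get_nearest_examples("embeddings", embeddings[0], k=5)
```

The `device=` argument of `add_faiss_index`, by contrast, appears to keep the whole index on GPU rather than using it only for the training step.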
It would be nice if we can use a GPU to do the training step and covert the index back to CPU as mention in [this faiss example](https:\/\/gist.github.com\/mdouze\/46d6bbbaabca0b9778fca37ed2bcccf6).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2108\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2108\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2107","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2107\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2107\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2107\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2107","id":839495825,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk5NTAxODE5","number":2107,"title":"Metadata validation","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"assignees":[{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gist
s_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-03-24T08:52:41Z","updated_at":"2021-04-26T08:27:14Z","closed_at":"2021-04-26T08:27:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2107","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2107","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2107.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2107.patch","merged_at":"2021-04-26T08:27:13Z"},"body":"- `pydantic` metadata schema with dedicated validators against our taxonomy\r\n- ci script to validate new changes against this schema and start a vertuous loop\r\n- soft validation on tasks ids since we expect the taxonomy to undergo some changes in the near future\r\n\r\nfor reference with the current validation we have ~365~ 378 datasets with invalid metadata! full error report [_here_.](https:\/\/gist.github.com\/theo-m\/61b3c0c47fc6121d08d3174bd4c2a26b)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2107\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2107\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2106","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2106\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2106\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2106\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2106","id":839084264,"node_id":"MDU6SXNzdWU4MzkwODQyNjQ=","number":2106,"title":"WMT19 Dataset for Kazakh-English is not formatted 
correctly","user":{"login":"trina731","id":22580542,"node_id":"MDQ6VXNlcjIyNTgwNTQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22580542?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/trina731","html_url":"https:\/\/github.com\/trina731","followers_url":"https:\/\/api.github.com\/users\/trina731\/followers","following_url":"https:\/\/api.github.com\/users\/trina731\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/trina731\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/trina731\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/trina731\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/trina731\/orgs","repos_url":"https:\/\/api.github.com\/users\/trina731\/repos","events_url":"https:\/\/api.github.com\/users\/trina731\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/trina731\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-23T20:14:47Z","updated_at":"2021-03-25T21:36:20Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In addition to the bug of languages being switched from Issue @415, there are incorrect translations in the dataset because the English-Kazakh translations have a one off formatting error.\r\n\r\nThe News Commentary v14 parallel data set for kk-en from http:\/\/www.statmt.org\/wmt19\/translation-task.html has a bug here:\r\n\r\n> Line 94. The Swiss National Bank, for its part, has been battling with the deflationary effects of the franc\u2019s dramatic appreciation over the past few years.\t\u0428\u0432\u0435\u0439\u0446\u0430\u0440\u0438\u044f\u043d\u044b\u04a3 \u04b0\u043b\u0442\u0442\u044b\u049b \u0431\u0430\u043d\u043a\u0456 \u04e9\u0437 \u0442\u0430\u0440\u0430\u043f\u044b\u043d\u0430\u043d, \u0441\u043e\u04a3\u0493\u044b \u0431\u0456\u0440\u043d\u0435\u0448\u0435 \u0436\u044b\u043b \u0456\u0448\u0456\u043d\u0434\u0435 \u0444\u0440\u0430\u043d\u043a \u049b\u04b1\u043d\u044b\u043d\u044b\u04a3 \u049b\u0430\u0442\u0442\u044b \u04e9\u0441\u0443\u0456\u043d\u0456\u04a3 \u0434\u0435\u0444\u043b\u044f\u0446\u0438\u044f\u043b\u044b\u049b \u04d9\u0441\u0435\u0440\u0456\u043c\u0435\u043d \u043a\u04af\u0440\u0435\u0441\u0456\u043f \u043a\u0435\u043b\u0435\u0434\u0456.\r\n> \r\n> Line 95. 
\u0414\u0435\u0444\u043b\u044f\u0446\u0438\u044f\u043b\u044b\u049b \u043a\u04af\u0448\u0442\u0435\u0440 2008 \u0436\u044b\u043b\u044b \u0442\u0435\u0440\u0435\u04a3 \u0436\u04d9\u043d\u0435 \u04b1\u0437\u0430\u049b\u049b\u0430 \u0441\u043e\u0437\u044b\u043b\u0493\u0430\u043d \u0436\u0430\u04bb\u0430\u043d\u0434\u044b\u049b \u0434\u0430\u0493\u0434\u0430\u0440\u044b\u0441\u049b\u0430 \u0431\u0430\u0439\u043b\u0430\u043d\u044b\u0441\u0442\u044b \u043e\u0440\u044b\u043d \u0430\u043b\u0493\u0430\u043d \u0456\u0440\u0456 \u044d\u043a\u043e\u043d\u043e\u043c\u0438\u043a\u0430\u043b\u044b\u049b \u0436\u04d9\u043d\u0435 \u049b\u0430\u0440\u0436\u044b\u043b\u044b\u049b \u043e\u0440\u044b\u043d \u0430\u043b\u043c\u0430\u0441\u0443\u043b\u0430\u0440\u0434\u044b\u04a3 \u0430\u0440\u049b\u0430\u0441\u044b\u043d\u0434\u0430 \u0431\u043e\u0441\u0430\u0442\u044b\u043b\u0434\u044b. \u0416\u0435\u043a\u0435 \u049b\u0430\u0440\u044b\u0437 \u049b\u0430\u0440\u0430\u0436\u0430\u0442\u044b \u04af\u043b\u0435\u0441\u0456\u043d\u0456\u04a3 \u049b\u044b\u0441\u049b\u0430\u0440\u0443\u044b \u043e\u0440\u0442\u0430\u043b\u044b\u049b \u0431\u0430\u043d\u043a\u0442\u0456\u04a3 \u0440\u0435\u0444\u043b\u044f\u0446\u0438\u044f\u0493\u0430 \u0436\u04b1\u043c\u0441\u0430\u043b\u0493\u0430\u043d \u043a\u04af\u0448-\u0436\u0456\u0433\u0435\u0440\u0456\u043d\u0435 \u0442\u04b1\u0440\u0430\u049b\u0442\u044b \u0441\u043e\u049b\u049b\u0430\u043d \u049b\u0430\u0440\u0441\u044b \u0436\u0435\u043b\u0434\u0435\u0439 \u0431\u043e\u043b\u0434\u044b.\r\n> \r\n> Line 96. The deflationary forces were unleashed by the major economic and financial dislocations associated with the deep and protracted global crisis that erupted in 2008. Private deleveraging became a steady headwind to central bank efforts to reflate.\t2009 \u0436\u044b\u043b\u044b, \u0430\u043b\u0434\u044b\u04a3\u0493\u044b \u049b\u0430\u0442\u0430\u0440\u043b\u044b \u044d\u043a\u043e\u043d\u043e\u043c\u0438\u043a\u0430\u043b\u0430\u0440\u0434\u044b\u04a3 \u0448\u0430\u043c\u0430\u043c\u0435\u043d \u04af\u0448\u0442\u0435\u043d \u0431\u0456\u0440\u0456 \u0431\u0430\u0493\u0430\u043d\u044b\u04a3 \u0442\u04e9\u043c\u0435\u043d\u0434\u0435\u0443\u0456\u043d \u043a\u04e9\u0440\u0441\u0435\u0442\u0442\u0456, \u0431\u04b1\u043b \u0441\u043e\u0493\u044b\u0441\u0442\u0430\u043d \u043a\u0435\u0439\u0456\u043d\u0433\u0456 \u0436\u043e\u0493\u0430\u0440\u044b \u0434\u0435\u04a3\u0433\u0435\u0439 \u0431\u043e\u043b\u0434\u044b.\r\n\r\nAs you can see, line 95 has only the Kazakh translation which should be part of line 96. This causes all of the following English-Kazakh translation pairs to be one off rendering ALL of those translations incorrect. This issue was not fixed when the dataset was imported to Huggingface. By running this code \r\n\r\n```\r\nimport datasets\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('wmt19', 'kk-en')\r\nfor key in dataset['train']['translation']:\r\n if 'The deflationary forces were unleashed by the major economic and financial dislocations associated with the deep and protracted global crisis that erupted in 2008.' 
in key['kk']:\r\n print(key['en'])\r\n print(key['kk'])\r\n break\r\n```\r\nwe get: \r\n> 2009 \u0436\u044b\u043b\u044b, \u0430\u043b\u0434\u044b\u04a3\u0493\u044b \u049b\u0430\u0442\u0430\u0440\u043b\u044b \u044d\u043a\u043e\u043d\u043e\u043c\u0438\u043a\u0430\u043b\u0430\u0440\u0434\u044b\u04a3 \u0448\u0430\u043c\u0430\u043c\u0435\u043d \u04af\u0448\u0442\u0435\u043d \u0431\u0456\u0440\u0456 \u0431\u0430\u0493\u0430\u043d\u044b\u04a3 \u0442\u04e9\u043c\u0435\u043d\u0434\u0435\u0443\u0456\u043d \u043a\u04e9\u0440\u0441\u0435\u0442\u0442\u0456, \u0431\u04b1\u043b \u0441\u043e\u0493\u044b\u0441\u0442\u0430\u043d \u043a\u0435\u0439\u0456\u043d\u0433\u0456 \u0436\u043e\u0493\u0430\u0440\u044b \u0434\u0435\u04a3\u0433\u0435\u0439 \u0431\u043e\u043b\u0434\u044b.\r\n> The deflationary forces were unleashed by the major economic and financial dislocations associated with the deep and protracted global crisis that erupted in 2008. Private deleveraging became a steady headwind to central bank efforts to reflate.\r\n\r\nwhich shows that the issue still persists in the Huggingface dataset. The Kazakh sentence matches up to the next English sentence in the dataset instead of the current one.\r\n\r\nPlease let me know if there's you have any ideas to fix this one-off error from the dataset or if this can be fixed by Huggingface.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2106\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2106\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2105","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2105\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2105\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2105\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2105","id":839059226,"node_id":"MDU6SXNzdWU4MzkwNTkyMjY=","number":2105,"title":"Request to remove S2ORC dataset","user":{"login":"kyleclo","id":13603748,"node_id":"MDQ6VXNlcjEzNjAzNzQ4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13603748?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kyleclo","html_url":"https:\/\/github.com\/kyleclo","followers_url":"https:\/\/api.github.com\/users\/kyleclo\/followers","following_url":"https:\/\/api.github.com\/users\/kyleclo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kyleclo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kyleclo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kyleclo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kyleclo\/orgs","repos_url":"https:\/\/api.github.com\/users\/kyleclo\/repos","events_url":"https:\/\/api.github.com\/users\/kyleclo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kyleclo\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-23T19:43:06Z","updated_at":"2021-08-04T19:18:02Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi! 
I was wondering if it's possible to remove [S2ORC](https:\/\/huggingface.co\/datasets\/s2orc) from hosting on Huggingface's platform? Unfortunately, there are some legal considerations about how we make this data available. Happy to add back to Huggingface's platform once we work out those hurdles! Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2105\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2105\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2104","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2104\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2104\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2104\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2104","id":839027834,"node_id":"MDU6SXNzdWU4MzkwMjc4MzQ=","number":2104,"title":"Trouble loading wiki_movies","user":{"login":"adityaarunsinghal","id":35391599,"node_id":"MDQ6VXNlcjM1MzkxNTk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35391599?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adityaarunsinghal","html_url":"https:\/\/github.com\/adityaarunsinghal","followers_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/followers","following_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/orgs","repos_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/repos","events_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-23T18:59:54Z","updated_at":"2021-04-05T23:17:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello,\r\nI am trying to load_dataset(\"wiki_movies\") and it gives me this error - \r\n\r\n`FileNotFoundError: Couldn't find file locally at wiki_movies\/wiki_movies.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.2\/datasets\/wiki_movies\/wiki_movies.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/wiki_movies\/wiki_movies.py`\r\n\r\nTrying to do `python run_mlm.py \\\r\n --model_name_or_path roberta-base \\\r\n --dataset_name wiki_movies \\` also gives the same error. \r\n\r\nIs this something on my end? From what I can tell, this dataset was re-added by @lhoestq a few months ago. 
\r\nThank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2104\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2104\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2103","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2103\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2103\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2103\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2103","id":838946916,"node_id":"MDU6SXNzdWU4Mzg5NDY5MTY=","number":2103,"title":"citation, homepage, and license fields of `dataset_info.json` are duplicated many times","user":{"login":"samsontmr","id":15007950,"node_id":"MDQ6VXNlcjE1MDA3OTUw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15007950?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/samsontmr","html_url":"https:\/\/github.com\/samsontmr","followers_url":"https:\/\/api.github.com\/users\/samsontmr\/followers","following_url":"https:\/\/api.github.com\/users\/samsontmr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/samsontmr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/samsontmr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/samsontmr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/samsontmr\/orgs","repos_url":"https:\/\/api.github.com\/users\/samsontmr\/repos","events_url":"https:\/\/api.github.com\/users\/samsontmr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/samsontmr\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-23T17:18:09Z","updated_at":"2021-04-06T14:39:59Z","closed_at":"2021-04-06T14:39:59Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"This happens after a `map` operation when `num_proc` is set to `>1`. 
I tested this by cleaning up the json before running the `map` op on the dataset so it's unlikely it's coming from an earlier concatenation.\r\n\r\nExample result:\r\n```\r\n\"citation\": \"@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n\r\n```\r\n\r\n@lhoestq and I believe this is happening due to the fields being concatenated `num_proc` times.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2103\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2103\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2102","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2102\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2102\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2102\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2102","id":838794090,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk4OTEyNzUw","number":2102,"title":"Move Dataset.to_csv to csv 
module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-23T14:35:46Z","updated_at":"2021-03-24T14:07:35Z","closed_at":"2021-03-24T14:07:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2102","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2102","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2102.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2102.patch","merged_at":"2021-03-24T14:07:34Z"},"body":"Move the implementation of `Dataset.to_csv` to module `datasets.io.csv`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2102\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2102\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2101","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2101\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2101\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2101\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2101","id":838586184,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk4NzQzMDM4","number":2101,"title":"MIAM dataset - new citation 
details","user":{"login":"eusip","id":1551356,"node_id":"MDQ6VXNlcjE1NTEzNTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1551356?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eusip","html_url":"https:\/\/github.com\/eusip","followers_url":"https:\/\/api.github.com\/users\/eusip\/followers","following_url":"https:\/\/api.github.com\/users\/eusip\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eusip\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eusip\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eusip\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eusip\/orgs","repos_url":"https:\/\/api.github.com\/users\/eusip\/repos","events_url":"https:\/\/api.github.com\/users\/eusip\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eusip\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-23T10:41:23Z","updated_at":"2021-03-23T18:08:10Z","closed_at":"2021-03-23T18:08:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2101","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2101","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2101.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2101.patch","merged_at":"2021-03-23T18:08:09Z"},"body":"Hi @lhoestq, I have updated the citations to reference an OpenReview preprint.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2101\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2101\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2100","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2100\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2100\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2100\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2100","id":838574631,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk4NzMzOTM0","number":2100,"title":"Fix deprecated warning message and 
docstring","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-23T10:27:52Z","updated_at":"2021-03-24T08:19:41Z","closed_at":"2021-03-23T18:03:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2100","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2100","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2100.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2100.patch","merged_at":"2021-03-23T18:03:49Z"},"body":"Fix deprecated warnings:\r\n- Use deprecated Sphinx directive in docstring\r\n- Fix format of deprecated message\r\n- Raise FutureWarning","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2100\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2100\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2099","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2099\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2099\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2099\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2099","id":838523819,"node_id":"MDU6SXNzdWU4Mzg1MjM4MTk=","number":2099,"title":"load_from_disk takes a long time to load local 
dataset","user":{"login":"samsontmr","id":15007950,"node_id":"MDQ6VXNlcjE1MDA3OTUw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15007950?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/samsontmr","html_url":"https:\/\/github.com\/samsontmr","followers_url":"https:\/\/api.github.com\/users\/samsontmr\/followers","following_url":"https:\/\/api.github.com\/users\/samsontmr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/samsontmr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/samsontmr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/samsontmr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/samsontmr\/orgs","repos_url":"https:\/\/api.github.com\/users\/samsontmr\/repos","events_url":"https:\/\/api.github.com\/users\/samsontmr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/samsontmr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-03-23T09:28:37Z","updated_at":"2021-03-23T17:12:16Z","closed_at":"2021-03-23T17:12:16Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I have an extremely large tokenized dataset (24M examples) that loads in a few minutes. However, after adding a column similar to `input_ids` (basically a list of integers) and saving the dataset to disk, the load time goes to >1 hour. I've even tried using `np.uint8` after seeing #1985 but it doesn't seem to be helping (the total size seems to be smaller though).\r\n\r\nDoes anyone know what could be the issue? Or does the casting of that column to `int8` need to happen in the function that writes the arrow table instead of in the `map` where I create the list of integers?\r\n\r\nTagging @lhoestq since you seem to be working on these issues and PRs :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2099\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2099\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2098","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2098\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2098\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2098\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2098","id":838447959,"node_id":"MDU6SXNzdWU4Mzg0NDc5NTk=","number":2098,"title":"SQuAD version 
","user":{"login":"h-peng17","id":39556019,"node_id":"MDQ6VXNlcjM5NTU2MDE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39556019?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/h-peng17","html_url":"https:\/\/github.com\/h-peng17","followers_url":"https:\/\/api.github.com\/users\/h-peng17\/followers","following_url":"https:\/\/api.github.com\/users\/h-peng17\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/h-peng17\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/h-peng17\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/h-peng17\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/h-peng17\/orgs","repos_url":"https:\/\/api.github.com\/users\/h-peng17\/repos","events_url":"https:\/\/api.github.com\/users\/h-peng17\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/h-peng17\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-23T07:47:54Z","updated_at":"2021-03-26T09:48:54Z","closed_at":"2021-03-26T09:48:54Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi~ \r\nI want train on squad dataset. What's the version of the squad? Is it 1.1 or 1.0? I'm new in QA, I don't find some descriptions about it. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2098\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2098\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2097","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2097\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2097\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2097\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2097","id":838105289,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk4MzM4MTA3","number":2097,"title":"fixes issue #1110 by descending further if `obj[\"_type\"]` is a 
dict","user":{"login":"dcfidalgo","id":15979778,"node_id":"MDQ6VXNlcjE1OTc5Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15979778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dcfidalgo","html_url":"https:\/\/github.com\/dcfidalgo","followers_url":"https:\/\/api.github.com\/users\/dcfidalgo\/followers","following_url":"https:\/\/api.github.com\/users\/dcfidalgo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dcfidalgo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dcfidalgo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dcfidalgo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dcfidalgo\/orgs","repos_url":"https:\/\/api.github.com\/users\/dcfidalgo\/repos","events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-22T21:00:55Z","updated_at":"2021-03-22T21:01:11Z","closed_at":"2021-03-22T21:01:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2097","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2097","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2097.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2097.patch","merged_at":null},"body":"Check metrics","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2097\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2097\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2096","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2096\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2096\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2096\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2096","id":838038379,"node_id":"MDU6SXNzdWU4MzgwMzgzNzk=","number":2096,"title":"CoNLL 2003 dataset not including 
German","user":{"login":"rxian","id":8406802,"node_id":"MDQ6VXNlcjg0MDY4MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8406802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rxian","html_url":"https:\/\/github.com\/rxian","followers_url":"https:\/\/api.github.com\/users\/rxian\/followers","following_url":"https:\/\/api.github.com\/users\/rxian\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rxian\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rxian\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rxian\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rxian\/orgs","repos_url":"https:\/\/api.github.com\/users\/rxian\/repos","events_url":"https:\/\/api.github.com\/users\/rxian\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rxian\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-22T19:23:56Z","updated_at":"2021-03-30T09:45:35Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello, thanks for all the work on developing and maintaining this amazing platform, which I am enjoying working with!\r\n\r\nI was wondering if there is a reason why the German CoNLL 2003 dataset is not included in the [repository](https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/conll2003), since a copy of it could be found in some places on the internet such as GitHub? I could help adding the German data to the hub, unless there are some copyright issues that I am unaware of...\r\n\r\nThis is considering that many work use the union of CoNLL 2002 and 2003 datasets for comparing cross-lingual NER transfer performance in `en`, `de`, `es`, and `nl`. 
E.g., [XLM-R](https:\/\/www.aclweb.org\/anthology\/2020.acl-main.747.pdf).\r\n\r\n## Adding a Dataset\r\n- **Name:** CoNLL 2003 German\r\n- **Paper:** https:\/\/www.aclweb.org\/anthology\/W03-0419\/\r\n- **Data:** https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/conll2003\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2096\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2096\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2093","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2093\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2093\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2093\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2093","id":837209211,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk3NTgyNjUx","number":2093,"title":"Fix: Allows a feature to be named \"_type\"","user":{"login":"dcfidalgo","id":15979778,"node_id":"MDQ6VXNlcjE1OTc5Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15979778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dcfidalgo","html_url":"https:\/\/github.com\/dcfidalgo","followers_url":"https:\/\/api.github.com\/users\/dcfidalgo\/followers","following_url":"https:\/\/api.github.com\/users\/dcfidalgo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dcfidalgo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dcfidalgo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dcfidalgo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dcfidalgo\/orgs","repos_url":"https:\/\/api.github.com\/users\/dcfidalgo\/repos","events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-21T23:21:57Z","updated_at":"2021-03-25T14:35:54Z","closed_at":"2021-03-25T14:35:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2093","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2093","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2093.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2093.patch","merged_at":"2021-03-25T14:35:54Z"},"body":"This PR tries to fix issue #1110. Sorry for taking so long to come back to this.\r\n\r\nIt's a simple fix, but i am not sure if it works for all possible types of `obj`. 
Let me know what you think @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2093\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2093\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2092","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2092\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2092\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2092\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2092","id":836984043,"node_id":"MDU6SXNzdWU4MzY5ODQwNDM=","number":2092,"title":"How to disable making arrow tables in load_dataset ?","user":{"login":"Jeevesh8","id":48825663,"node_id":"MDQ6VXNlcjQ4ODI1NjYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48825663?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Jeevesh8","html_url":"https:\/\/github.com\/Jeevesh8","followers_url":"https:\/\/api.github.com\/users\/Jeevesh8\/followers","following_url":"https:\/\/api.github.com\/users\/Jeevesh8\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Jeevesh8\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Jeevesh8\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Jeevesh8\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Jeevesh8\/orgs","repos_url":"https:\/\/api.github.com\/users\/Jeevesh8\/repos","events_url":"https:\/\/api.github.com\/users\/Jeevesh8\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Jeevesh8\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-21T04:50:07Z","updated_at":"2021-03-26T18:37:40Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Is there a way to disable the construction of arrow tables, or to make them on the fly as the dataset is being used ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2092\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2092\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2091","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2091\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2091\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2091\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2091","id":836831403,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk3Mjk4ODI3","number":2091,"title":"Fix copy snippet in 
docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-20T15:08:22Z","updated_at":"2021-03-24T08:20:50Z","closed_at":"2021-03-23T17:18:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2091","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2091","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2091.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2091.patch","merged_at":"2021-03-23T17:18:31Z"},"body":"With this change the lines starting with `...` in the code blocks can be properly copied to clipboard.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2091\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2091\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2090","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2090\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2090\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2090\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2090","id":836807498,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk3MjgwNTEy","number":2090,"title":"Add machine translated multilingual STS benchmark 
dataset","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-20T13:28:07Z","updated_at":"2021-03-29T13:24:42Z","closed_at":"2021-03-29T13:00:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2090","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2090","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2090.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2090.patch","merged_at":"2021-03-29T13:00:15Z"},"body":"also see here https:\/\/github.com\/PhilipMay\/stsb-multi-mt","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2090\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2090\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2089","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2089\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2089\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2089\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2089","id":836788019,"node_id":"MDU6SXNzdWU4MzY3ODgwMTk=","number":2089,"title":"Add documentaton for dataset README.md 
files","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-20T11:44:38Z","updated_at":"2021-07-12T17:41:40Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nthe dataset README files have special headers.\r\nSomehow a documenation of the allowed values and tags is missing.\r\nCould you add that?\r\n\r\nJust to give some concrete questions that should be answered imo:\r\n- which values can be passted to multilinguality?\r\n- what should be passed to language_creators?\r\n- which values should licenses have? What do I say when it is a custom license? Should I add a link?\r\n- how should I choose size_categories ? What are valid ranges?\r\n- what are valid task_categories?\r\n\r\nThanks\r\nPhilip","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2089\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2089\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2088","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2088\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2088\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2088\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2088","id":836763733,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk3MjQ4Mzk1","number":2088,"title":"change bibtex template to author instead of 
authors","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-20T09:23:44Z","updated_at":"2021-03-23T15:40:12Z","closed_at":"2021-03-23T15:40:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2088","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2088","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2088.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2088.patch","merged_at":"2021-03-23T15:40:12Z"},"body":"Hi,\r\nIMO when using BibTex Author should be used instead of Authors.\r\nSee here: http:\/\/www.bibtex.org\/Using\/de\/\r\n\r\nThanks\r\nPhilip","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2088\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2088\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2087","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2087\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2087\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2087\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2087","id":836587392,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk3MDg4NTk2","number":2087,"title":"Update metadata if dataset features are 
modified","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-20T02:05:23Z","updated_at":"2021-04-09T09:25:33Z","closed_at":"2021-04-09T09:25:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2087","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2087","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2087.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2087.patch","merged_at":"2021-04-09T09:25:33Z"},"body":"This PR adds a decorator that updates the dataset metadata if a previously executed transform modifies its features. \r\nFixes #2083 \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2087\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2087\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2086","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2086\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2086\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2086\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2086","id":836249587,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk2Nzg0Mjcz","number":2086,"title":"change user permissions to 
-rw-r--r--","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-19T18:14:56Z","updated_at":"2021-03-24T13:59:04Z","closed_at":"2021-03-24T13:59:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2086","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2086","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2086.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2086.patch","merged_at":"2021-03-24T13:59:04Z"},"body":"Fix for #2065 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2086\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2086\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2085","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2085\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2085\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2085\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2085","id":835870994,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk2NDYyOTc2","number":2085,"title":"Fix max_wait_time in 
requests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-19T11:22:26Z","updated_at":"2021-03-23T15:36:38Z","closed_at":"2021-03-23T15:36:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2085","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2085","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2085.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2085.patch","merged_at":"2021-03-23T15:36:37Z"},"body":"it was handled as a min time, not max cc @SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2085\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2085\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2084","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2084\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2084\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2084\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2084","id":835750671,"node_id":"MDU6SXNzdWU4MzU3NTA2NzE=","number":2084,"title":"CUAD - Contract Understanding Atticus 
Dataset","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-19T09:27:43Z","updated_at":"2021-04-16T08:50:44Z","closed_at":"2021-04-16T08:50:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** CUAD - Contract Understanding Atticus Dataset\r\n- **Description:** As one of the only large, specialized NLP benchmarks annotated by experts, CUAD can serve as a challenging research benchmark for the broader NLP community.\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2103.06268\r\n- **Data:** https:\/\/github.com\/TheAtticusProject\/cuad\/\r\n- **Motivation:** good domain specific datasets are valuable\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2084\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2084\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2083","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2083\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2083\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2083\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2083","id":835695425,"node_id":"MDU6SXNzdWU4MzU2OTU0MjU=","number":2083,"title":"`concatenate_datasets` throws error when changing the order of datasets to 
concatenate","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-19T08:29:48Z","updated_at":"2021-04-09T09:25:33Z","closed_at":"2021-04-09T09:25:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hey, \r\n\r\nI played around with the `concatenate_datasets(...)` function: https:\/\/huggingface.co\/docs\/datasets\/package_reference\/main_classes.html?highlight=concatenate_datasets#datasets.concatenate_datasets\r\n\r\nand noticed that when the order in which the datasets are concatenated changes an error is thrown where it should not IMO.\r\n\r\nHere is a google colab to reproduce the error: https:\/\/colab.research.google.com\/drive\/17VTFU4KQ735-waWZJjeOHS6yDTfV5ekK?usp=sharing","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2083\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2083\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2082","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2082\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2082\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2082\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2082","id":835401555,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk2MDY1NTM0","number":2082,"title":"Updated card using information from data statement and 
datasheet","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-19T00:39:38Z","updated_at":"2021-03-19T14:29:09Z","closed_at":"2021-03-19T14:29:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2082","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2082","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2082.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2082.patch","merged_at":"2021-03-19T14:29:08Z"},"body":"I updated and clarified the REFreSD [data card](https:\/\/github.com\/mcmillanmajora\/datasets\/blob\/refresd_card\/datasets\/refresd\/README.md) with information from the Eleftheria's [website](https:\/\/elbria.github.io\/post\/refresd\/). I added brief descriptions where the initial card referred to the paper, and I also recreated some of the tables in the paper to show relevant dataset statistics.\r\n\r\nI'll email Eleftheria to see if she has any comments on the card. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2082\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2082\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2081","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2081\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2081\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2081\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2081","id":835112968,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk1ODE3OTM4","number":2081,"title":"Fix docstrings issues","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-18T18:11:01Z","updated_at":"2021-04-07T14:37:43Z","closed_at":"2021-04-07T14:37:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2081","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2081","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2081.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2081.patch","merged_at":"2021-04-07T14:37:43Z"},"body":"Fix docstring issues.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2081\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2081\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2080","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2080\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2080\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2080\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2080","id":835023000,"node_id":"MDU6SXNzdWU4MzUwMjMwMDA=","number":2080,"title":"Multidimensional arrays in a Dataset","user":{"login":"vermouthmjl","id":3142085,"node_id":"MDQ6VXNlcjMxNDIwODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3142085?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vermouthmjl","html_url":"https:\/\/github.com\/vermouthmjl","followers_url":"https:\/\/api.github.com\/users\/vermouthmjl\/followers","following_url":"https:\/\/api.github.com\/users\/vermouthmjl\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vermouthmjl\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vermouthmjl\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vermouthmjl\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vermouthmjl\/orgs","repos_url":"https:\/\/api.github.com\/users\/vermouthmjl\/repos","events_url":"https:\/\/api.github.com\/users\/vermouthmjl\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vermouthmjl\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-18T16:29:14Z","updated_at":"2021-03-25T12:46:53Z","closed_at":"2021-03-25T12:46:53Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI'm trying to put together a `datasets.Dataset` to be used with LayoutLM which is available in `transformers`. This model requires as input the bounding boxes of each of the token of a sequence. 
This is when I realized that `Dataset` does not support multi-dimensional arrays as a value for a column in a row.\r\n\r\nThe following code results in conversion error in pyarrow (`pyarrow.lib.ArrowInvalid: ('Can only convert 1-dimensional array values', 'Conversion failed for column bbox with type object')`)\r\n\r\n```\r\nfrom datasets import Dataset\r\nimport pandas as pd\r\nimport numpy as np\r\n\r\ndataset = pd.DataFrame({\r\n 'bbox': [\r\n np.array([[1,2,3,4],[1,2,3,4],[1,2,3,4]]),\r\n np.array([[1,2,3,4],[1,2,3,4],[1,2,3,4]]),\r\n np.array([[1,2,3,4],[1,2,3,4],[1,2,3,4]]),\r\n np.array([[1,2,3,4],[1,2,3,4],[1,2,3,4]])\r\n ],\r\n 'input_ids': [1, 2, 3, 4]\r\n})\r\ndataset = Dataset.from_pandas(dataset)\r\n```\r\n\r\nSince I wanted to use pytorch for the downstream training task, I also tried a few ways to directly put in a column of 2-D pytorch tensor in a formatted dataset, but I can only have a list of 1-D tensors, or a list of arrays, or a list of lists.\r\n\r\n```\r\nimport torch\r\nfrom datasets import Dataset\r\nimport pandas as pd\r\n\r\ndataset = pd.DataFrame({\r\n 'bbox': [\r\n [[1,2,3,4],[1,2,3,4],[1,2,3,4]],\r\n [[1,2,3,4],[1,2,3,4],[1,2,3,4]],\r\n [[1,2,3,4],[1,2,3,4],[1,2,3,4]],\r\n [[1,2,3,4],[1,2,3,4],[1,2,3,4]]\r\n ],\r\n 'input_ids': [1, 2, 3, 4]\r\n})\r\ndataset = Dataset.from_pandas(dataset)\r\n\r\ndef test(examples):\r\n return {'bbbox': torch.Tensor(examples['bbox'])}\r\ndataset = dataset.map(test)\r\nprint(dataset[0]['bbox'])\r\nprint(dataset[0]['bbbox'])\r\n\r\ndataset.set_format(type='torch', columns=['input_ids', 'bbox'], output_all_columns=True)\r\nprint(dataset[0]['bbox'])\r\nprint(dataset[0]['bbbox'])\r\n\r\ndef test2(examples):\r\n return {'bbbox': torch.stack(examples['bbox'])}\r\ndataset = dataset.map(test2)\r\n\r\nprint(dataset[0]['bbox'])\r\nprint(dataset[0]['bbbox'])\r\n```\r\n\r\nIs is possible to support n-D arrays\/tensors in datasets? 
\r\nIt seems that it can also be useful for this [feature request](https:\/\/github.com\/huggingface\/datasets\/issues\/263).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2080\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2080\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2079","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2079\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2079\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2079\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2079","id":834920493,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk1NjU2MDQ5","number":2079,"title":"Refactorize Metric.compute signature to force keyword arguments only","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-18T15:05:50Z","updated_at":"2021-03-23T15:31:44Z","closed_at":"2021-03-23T15:31:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2079","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2079","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2079.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2079.patch","merged_at":"2021-03-23T15:31:44Z"},"body":"Minor refactoring of Metric.compute signature to force the use of keyword arguments, by using the single star syntax.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2079\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2079\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2078","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2078\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2078\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2078\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2078","id":834694819,"node_id":"MDU6SXNzdWU4MzQ2OTQ4MTk=","number":2078,"title":"MemoryError when computing WER metric","user":{"login":"diego-fustes","id":5707233,"node_id":"MDQ6VXNlcjU3MDcyMzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5707233?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/diego-fustes","html_url":"https:\/\/github.com\/diego-fustes","followers_url":"https:\/\/api.github.com\/users\/diego-fustes\/followers","following_url":"https:\/\/api.github.com\/users\/diego-fustes\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/diego-fustes\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/diego-fustes\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/diego-fustes\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/diego-fustes\/orgs","repos_url":"https:\/\/api.github.com\/users\/diego-fustes\/repos","events_url":"https:\/\/api.github.com\/users\/diego-fustes\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/diego-fustes\/received_events","type":"User","site_admin":false},"labels":[{"id":2067393914,"node_id":"MDU6TGFiZWwyMDY3MzkzOTE0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20bug","name":"metric bug","color":"25b21e","default":false,"description":"A bug in a metric 
script"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":11,"created_at":"2021-03-18T11:30:05Z","updated_at":"2021-05-01T08:31:49Z","closed_at":"2021-04-06T07:20:43Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I'm trying to follow the ASR example to try Wav2Vec. 
This is the code that I use for WER calculation:\r\n\r\n```\r\nwer = load_metric(\"wer\")\r\nprint(wer.compute(predictions=result[\"predicted\"], references=result[\"target\"]))\r\n```\r\n\r\nHowever, I receive the following exception:\r\n\r\n`Traceback (most recent call last):\r\n File \"\/home\/diego\/IpGlobal\/wav2vec\/test_wav2vec.py\", line 51, in \r\n print(wer.compute(predictions=result[\"predicted\"], references=result[\"target\"]))\r\n File \"\/home\/diego\/miniconda3\/envs\/wav2vec3.6\/lib\/python3.6\/site-packages\/datasets\/metric.py\", line 403, in compute\r\n output = self._compute(predictions=predictions, references=references, **kwargs)\r\n File \"\/home\/diego\/.cache\/huggingface\/modules\/datasets_modules\/metrics\/wer\/73b2d32b723b7fb8f204d785c00980ae4d937f12a65466f8fdf78706e2951281\/wer.py\", line 94, in _compute\r\n return wer(references, predictions)\r\n File \"\/home\/diego\/miniconda3\/envs\/wav2vec3.6\/lib\/python3.6\/site-packages\/jiwer\/measures.py\", line 81, in wer\r\n truth, hypothesis, truth_transform, hypothesis_transform, **kwargs\r\n File \"\/home\/diego\/miniconda3\/envs\/wav2vec3.6\/lib\/python3.6\/site-packages\/jiwer\/measures.py\", line 192, in compute_measures\r\n H, S, D, I = _get_operation_counts(truth, hypothesis)\r\n File \"\/home\/diego\/miniconda3\/envs\/wav2vec3.6\/lib\/python3.6\/site-packages\/jiwer\/measures.py\", line 273, in _get_operation_counts\r\n editops = Levenshtein.editops(source_string, destination_string)\r\nMemoryError`\r\n\r\nMy system has more than 10GB of available RAM. Looking at the code, I think that it could be related to the way jiwer does the calculation, as it is pasting all the sentences in a single string before calling Levenshtein editops function.\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2078\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2078\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2077","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2077\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2077\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2077\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2077","id":834649536,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk1NDI0MTYw","number":2077,"title":"Bump huggingface_hub 
version","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-18T10:54:34Z","updated_at":"2021-03-18T11:33:26Z","closed_at":"2021-03-18T11:33:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2077","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2077","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2077.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2077.patch","merged_at":"2021-03-18T11:33:26Z"},"body":"`0.0.2 => 0.0.6`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2077\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2077\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2076","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2076\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2076\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2076\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2076","id":834445296,"node_id":"MDU6SXNzdWU4MzQ0NDUyOTY=","number":2076,"title":"Issue: Dataset download 
error","user":{"login":"XuhuiZhou","id":20436061,"node_id":"MDQ6VXNlcjIwNDM2MDYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20436061?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/XuhuiZhou","html_url":"https:\/\/github.com\/XuhuiZhou","followers_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/followers","following_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/orgs","repos_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/repos","events_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-18T06:36:06Z","updated_at":"2021-03-22T11:52:31Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The download link in `iwslt2017.py` file does not seem to work anymore.\r\n\r\nFor example, `FileNotFoundError: Couldn't find file at https:\/\/wit3.fbk.eu\/archive\/2017-01-trnted\/texts\/zh\/en\/zh-en.tgz`\r\n\r\nWould be nice if we could modify it script and use the new downloadable link?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2076\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2076\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2075","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2075\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2075\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2075\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2075","id":834301246,"node_id":"MDU6SXNzdWU4MzQzMDEyNDY=","number":2075,"title":"ConnectionError: Couldn't reach 
common_voice.py","user":{"login":"LifaSun","id":6188893,"node_id":"MDQ6VXNlcjYxODg4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6188893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LifaSun","html_url":"https:\/\/github.com\/LifaSun","followers_url":"https:\/\/api.github.com\/users\/LifaSun\/followers","following_url":"https:\/\/api.github.com\/users\/LifaSun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LifaSun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LifaSun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LifaSun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LifaSun\/orgs","repos_url":"https:\/\/api.github.com\/users\/LifaSun\/repos","events_url":"https:\/\/api.github.com\/users\/LifaSun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LifaSun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-18T01:19:06Z","updated_at":"2021-03-20T10:29:41Z","closed_at":"2021-03-20T10:29:41Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I run: \r\nfrom datasets import load_dataset, load_metric\r\n\r\ncommon_voice_train = load_dataset(\"common_voice\", \"zh-CN\", split=\"train+validation\")\r\ncommon_voice_test = load_dataset(\"common_voice\", \"zh-CN\", split=\"test\")\r\n\r\nGot:\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/common_voice\/common_voice.py\r\n\r\nVersion:\r\n1.4.1\r\n\r\nThanks! @lhoestq @LysandreJik @thomwolf ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2075\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2075\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2074","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2074\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2074\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2074\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2074","id":834268463,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk1MTIzMjYw","number":2074,"title":"Fix size categories in YAML 
Tags","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-03-18T00:02:36Z","updated_at":"2021-03-23T17:11:10Z","closed_at":"2021-03-23T17:11:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2074","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2074","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2074.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2074.patch","merged_at":"2021-03-23T17:11:09Z"},"body":"This PR fixes several `size_categories` in YAML tags and makes them consistent. Additionally, I have added a few more categories after `1M`, up to `1T`. I would like to add that to the streamlit app also.\r\n\r\nThis PR also adds a couple of infos that I found missing.\r\n\r\nThe code for generating this:\r\n```python\r\nfor dataset in sorted(os.listdir('.\/datasets\/')):\r\n if '.' 
not in dataset and dataset not in ['c4', 'csv', 'downloads', 'cc100', 'ccaligned_multilingual', 'celeb_a', 'chr_en', 'emea', 'glue']:\r\n infos = {}\r\n stats = {}\r\n st = ''\r\n with open(f'datasets\/{dataset}\/README.md') as f:\r\n d = f.read()\r\n start_dash = d.find('---') + 3\r\n end_dash = d[start_dash:].find('---') + 3\r\n rest_text = d[end_dash + 3:]\r\n try:\r\n full_yaml = OmegaConf.create(d[start_dash:end_dash])\r\n readme = OmegaConf.to_container(full_yaml['size_categories'], resolve=True)\r\n except Exception as e:\r\n print(e)\r\n continue \r\n try:\r\n with open(f'datasets\/{dataset}\/dataset_infos.json') as f:\r\n data = json.load(f)\r\n except Exception as e:\r\n print(e)\r\n continue # Skip those without infos.\r\n done_set = set([])\r\n num_keys = len(data.keys())\r\n for keys in data:\r\n # dataset = load_dataset('opus100', f'{dirs}')\r\n total = 0\r\n for split in data[keys]['splits']:\r\n total = total + data[keys]['splits'][split]['num_examples']\r\n if total < 1000:\r\n st += \"- n<1K\" + '\\n'\r\n infos[keys] = [\"n<1K\"]\r\n elif total >= 1000 and total < 10000:\r\n infos[keys] = [\"1K<n<10K\"]\r\n elif total >= 10000 and total < 100000:\r\n infos[keys] = [\"10K<n<100K\"]\r\n elif total >= 100000 and total < 1000000:\r\n infos[keys] = [\"100K<n<1M\"]\r\n elif total >= 1000000 and total < 10000000:\r\n infos[keys] = [\"1M<n<10M\"]\r\n elif total >= 10000000 and total < 100000000:\r\n infos[keys] = [\"10M<n<100M\"]\r\n elif total >= 100000000 and total < 1000000000:\r\n infos[keys] = [\"100M<n<1B\"]\r\n elif total >= 1000000000 and total < 10000000000:\r\n infos[keys] = [\"1B<n<10B\"]\r\n elif total >= 10000000000 and total < 100000000000:\r\n infos[keys] = [\"10B<n<100B\"]\r\n elif total >= 100000000000 and total < 1000000000000:\r\n infos[keys] = [\"100B<n<1T\"]\r\n else:\r\n infos[keys] = [\"n>1T\"]\r\n done_set = done_set.union(infos[keys])\r\n if (isinstance(readme, list) and list(infos.values())[0] != readme) or (isinstance(readme, dict) and readme != infos):\r\n\r\n print('-' * 30)\r\n print(done_set)\r\n print(f\"Changing Full YAML for {dataset}\")\r\n print(OmegaConf.to_yaml(full_yaml))\r\n\r\n if len(done_set) == 1:\r\n full_yaml['size_categories'] = list(done_set)\r\n else:\r\n full_yaml['size_categories'] = dict([(k, v) for k, v in sorted(infos.items(), key=lambda x: x[0])])\r\n\r\n full_yaml_string = OmegaConf.to_yaml(full_yaml)\r\n print('-' * 30)\r\n print(full_yaml_string)\r\n inp = input('Do you wish to continue?(Y\/N)')\r\n if inp == 'Y':\r\n with open(f'.\/datasets\/{dataset}\/README.md', 'w') as f:\r\n f.write('---\\n')\r\n f.write(full_yaml_string)\r\n f.write('---')\r\n f.write(rest_text)\r\n else:\r\n break\r\n```\r\n\r\nNote that the lower-bound is inclusive. I'm unsure if this is how it is done in the tagging app.\r\n\r\nEDIT:\r\nIt would be great if there was a way to make the task categories consistent too. For this, the streamlit app can look into all the datasets and check for existing categories and show them in the list. This may add some consistency.\r\n\r\nEDIT:\r\nI understand this will not work for cases where only the infos for some of the configs are present, for example: `ccaligned_multilingual` has only 5 out of several configs present, and infos has only information about them. 
Hence, I have skipped a few datasets in the code, if there are more such datasets, then I'll ignore them too.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2074\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2074\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2073","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2073\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2073\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2073\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2073","id":834192501,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk1MDYyMzQ2","number":2073,"title":"Fixes check of TF_AVAILABLE and TORCH_AVAILABLE","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-17T21:28:53Z","updated_at":"2021-03-18T09:09:25Z","closed_at":"2021-03-18T09:09:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2073","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2073","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2073.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2073.patch","merged_at":"2021-03-18T09:09:24Z"},"body":"# What is this PR doing\r\n\r\nThis PR implements the checks if `Tensorflow` and `Pytorch` are available the same way as `transformers` does it. I added the additional checks for the different `Tensorflow` and `torch` versions. 
#2068 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2073\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2073\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2072","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2072\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2072\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2072\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2072","id":834054837,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk0OTQ5NjA4","number":2072,"title":"Fix docstring issues","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-17T18:13:44Z","updated_at":"2021-03-24T08:20:57Z","closed_at":"2021-03-18T12:41:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2072","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2072","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2072.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2072.patch","merged_at":"2021-03-18T12:41:21Z"},"body":"Fix docstring issues.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2072\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2072\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2071","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2071\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2071\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2071\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2071","id":833950824,"node_id":"MDU6SXNzdWU4MzM5NTA4MjQ=","number":2071,"title":"Multiprocessing is slower than single process","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-17T16:08:58Z","updated_at":"2021-03-18T09:10:23Z","closed_at":"2021-03-18T09:10:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```python\r\n# benchmark_filter.py\r\nimport logging\r\nimport sys\r\nimport time\r\n\r\nfrom datasets import load_dataset, set_caching_enabled\r\n\r\n\r\nif __name__ == \"__main__\":\r\n set_caching_enabled(False)\r\n logging.basicConfig(level=logging.DEBUG)\r\n\r\n bc = load_dataset(\"bookcorpus\")\r\n\r\n now = time.time()\r\n try:\r\n bc[\"train\"].filter(lambda x: len(x[\"text\"]) < 64, num_proc=int(sys.argv[1]))\r\n except Exception as e:\r\n print(f\"cancelled: {e}\")\r\n elapsed = time.time() - now\r\n\r\n print(elapsed)\r\n```\r\n\r\nRunning `python benchmark_filter.py 1` (20min+) is faster than `python benchmark_filter.py 2` (2hrs+)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2071\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2071\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2070","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2070\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2070\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2070\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2070","id":833799035,"node_id":"MDU6SXNzdWU4MzM3OTkwMzU=","number":2070,"title":"ArrowInvalid issue for squad v2 dataset","user":{"login":"MichaelYxWang","id":29818977,"node_id":"MDQ6VXNlcjI5ODE4OTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29818977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MichaelYxWang","html_url":"https:\/\/github.com\/MichaelYxWang","followers_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/followers","following_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/orgs","repos_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/repos","events_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-17T13:51:49Z","updated_at":"2021-08-04T17:57:16Z","closed_at":"2021-08-04T17:57:16Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello, I am using the huggingface official question answering example notebook (https:\/\/colab.research.google.com\/github\/huggingface\/notebooks\/blob\/master\/examples\/question_answering.ipynb). \r\n\r\nIn the prepare_validation_features function, I made some modifications to tokenize a new set of questions with the original contexts and save them in three different lists called candidate_input_ids, candidate_attention_mask and candidate_token_type_ids. 
When I try to run the next cell for dataset.map, I got the following error:\r\n\r\n`ArrowInvalid: Column 1 named candidate_attention_mask expected length 1180 but got length 1178`\r\n\r\nMy code is as follows:\r\n\r\n```\r\ndef generate_candidate_questions(examples):\r\n val_questions = examples[\"question\"]\r\n candididate_questions = random.sample(datasets[\"train\"][\"question\"], len(val_questions))\r\n candididate_questions = [x[:max_length] for x in candididate_questions]\r\n return candididate_questions\r\n\r\ndef prepare_validation_features(examples, use_mixing=False):\r\n pad_on_right = tokenizer.padding_side == \"right\"\r\n tokenized_examples = tokenizer(\r\n examples[\"question\" if pad_on_right else \"context\"],\r\n examples[\"context\" if pad_on_right else \"question\"],\r\n truncation=\"only_second\" if pad_on_right else \"only_first\",\r\n max_length=max_length,\r\n stride=doc_stride,\r\n return_overflowing_tokens=True,\r\n return_offsets_mapping=True,\r\n padding=\"max_length\",\r\n )\r\n if use_mixing:\r\n candidate_questions = generate_candidate_questions(examples)\r\n tokenized_candidates = tokenizer(\r\n candidate_questions if pad_on_right else examples[\"context\"],\r\n examples[\"context\"] if pad_on_right else candidate_questions,\r\n truncation=\"only_second\" if pad_on_right else \"only_first\",\r\n max_length=max_length,\r\n stride=doc_stride,\r\n return_overflowing_tokens=True,\r\n return_offsets_mapping=True,\r\n padding=\"max_length\",\r\n )\r\n\r\n sample_mapping = tokenized_examples.pop(\"overflow_to_sample_mapping\")\r\n\r\n tokenized_examples[\"example_id\"] = []\r\n\r\n if use_mixing:\r\n tokenized_examples[\"candidate_input_ids\"] = tokenized_candidates[\"input_ids\"]\r\n tokenized_examples[\"candidate_attention_mask\"] = tokenized_candidates[\"attention_mask\"]\r\n tokenized_examples[\"candidate_token_type_ids\"] = tokenized_candidates[\"token_type_ids\"]\r\n\r\n for i in range(len(tokenized_examples[\"input_ids\"])):\r\n sequence_ids = tokenized_examples.sequence_ids(i)\r\n context_index = 1 if pad_on_right else 0\r\n\r\n sample_index = sample_mapping[i]\r\n tokenized_examples[\"example_id\"].append(examples[\"id\"][sample_index])\r\n\r\n tokenized_examples[\"offset_mapping\"][i] = [\r\n (o if sequence_ids[k] == context_index else None)\r\n for k, o in enumerate(tokenized_examples[\"offset_mapping\"][i])\r\n ]\r\n\r\n return tokenized_examples\r\n\r\n\r\n\r\nvalidation_features = datasets[\"validation\"].map(\r\n lambda xs: prepare_validation_features(xs, True),\r\n batched=True,\r\n remove_columns=datasets[\"validation\"].column_names\r\n)\r\n```\r\n\r\nI guess this might happen because of the batched=True. I see similar issues in this repo related to arrow table length mismatch error, but in their cases, the numbers vary a lot. In my case, this error always happens when the expected length and unexpected length are very close. 
Thanks for the help!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2070\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2070\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2069","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2069\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2069\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2069\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2069","id":833768926,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk0NzA5ODYw","number":2069,"title":"Add and fix docstring for NamedSplit","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-17T13:19:28Z","updated_at":"2021-03-18T10:27:40Z","closed_at":"2021-03-18T10:27:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2069","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2069","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2069.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2069.patch","merged_at":"2021-03-18T10:27:40Z"},"body":"Add and fix docstring for `NamedSplit`, which was missing.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2069\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2069\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2068","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2068\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2068\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2068\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2068","id":833602832,"node_id":"MDU6SXNzdWU4MzM2MDI4MzI=","number":2068,"title":"PyTorch not available error on SageMaker GPU docker though it is installed ","user":{"login":"sivakhno","id":1651457,"node_id":"MDQ6VXNlcjE2NTE0NTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1651457?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sivakhno","html_url":"https:\/\/github.com\/sivakhno","followers_url":"https:\/\/api.github.com\/users\/sivakhno\/followers","following_url":"https:\/\/api.github.com\/users\/sivakhno\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sivakhno\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sivakhno\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sivakhno\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sivakhno\/orgs","repos_url":"https:\/\/api.github.com\/users\/sivakhno\/repos","events_url":"https:\/\/api.github.com\/users\/sivakhno\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sivakhno\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-17T10:04:27Z","updated_at":"2021-06-14T04:47:30Z","closed_at":"2021-06-14T04:47:30Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I get en error when running data loading using SageMaker SDK\r\n\r\n```\r\n File \"main.py\", line 34, in \r\n run_training()\r\n File \"main.py\", line 25, in run_training\r\n dm.setup('fit')\r\n File \"\/opt\/conda\/lib\/python3.6\/site-packages\/pytorch_lightning\/core\/datamodule.py\", line 92, in wrapped_fn\r\n return fn(*args, **kwargs)\r\n File \"\/opt\/ml\/code\/data_module.py\", line 103, in setup\r\n self.dataset[split].set_format(type=\"torch\", columns=self.columns)\r\n File \"\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/fingerprint.py\", line 337, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py\", line 995, in set_format\r\n _ = get_formatter(type, **format_kwargs)\r\nFile \"\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/formatting\/__init__.py\", line 114, in get_formatter\r\n raise _FORMAT_TYPES_ALIASES_UNAVAILABLE[format_type]\r\nValueError: PyTorch needs to be installed to be able to return PyTorch tensors.\r\n```\r\n\r\nwhen trying to execute dataset loading using this notebook https:\/\/github.com\/PyTorchLightning\/pytorch-lightning\/blob\/master\/notebooks\/04-transformers-text-classification.ipynb, specifically lines \r\n\r\n```\r\nself.columns = [c for c in self.dataset[split].column_names if c in self.loader_columns]\r\nself.dataset[split].set_format(type=\"torch\", columns=self.columns)\r\n```\r\n\r\nThe SageMaker docker image used is 763104351884.dkr.ecr.eu-central-1.amazonaws.com\/pytorch-training:1.4.0-gpu-py3 .\r\n\r\nBy running container interactively I 
have checked that torch loading completes successfully by executing `https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/config.py#L39`. \r\n\r\nAlso, as the first lines in the data loading module I have \r\n\r\n```\r\nimport os\r\nos.environ[\"USE_TF\"] = \"0\" \r\nos.environ[\"USE_TORCH\"] = \"1\" \r\n```\r\n\r\nBut unfortunately the error still persists. Any suggestions would be appreciated as I am stuck.\r\nMany Thanks! \r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2068\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2068\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2067","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2067\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2067\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2067\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2067","id":833559940,"node_id":"MDU6SXNzdWU4MzM1NTk5NDA=","number":2067,"title":"Multiprocessing windows error","user":{"login":"flozi00","id":47894090,"node_id":"MDQ6VXNlcjQ3ODk0MDkw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47894090?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/flozi00","html_url":"https:\/\/github.com\/flozi00","followers_url":"https:\/\/api.github.com\/users\/flozi00\/followers","following_url":"https:\/\/api.github.com\/users\/flozi00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/flozi00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/flozi00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/flozi00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/flozi00\/orgs","repos_url":"https:\/\/api.github.com\/users\/flozi00\/repos","events_url":"https:\/\/api.github.com\/users\/flozi00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/flozi00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-03-17T09:12:28Z","updated_at":"2021-08-04T17:59:08Z","closed_at":"2021-08-04T17:59:08Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As described here https:\/\/huggingface.co\/blog\/fine-tune-xlsr-wav2vec2\r\n\r\nWhen using the num_proc argument on Windows, the whole Python environment crashes and hangs in a loop.\r\nFor example at the map_to_array part.\r\nAn error occurs because the cache file already exists and Windows throws an error. 
After this the log crashes into an loop ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2067\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2067\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2066","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2066\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2066\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2066\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2066","id":833480551,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk0NDcwMjEz","number":2066,"title":"Fix docstring rendering of Dataset\/DatasetDict.from_csv args","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-17T07:23:10Z","updated_at":"2021-03-17T09:21:21Z","closed_at":"2021-03-17T09:21:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2066","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2066","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2066.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2066.patch","merged_at":"2021-03-17T09:21:21Z"},"body":"Fix the docstring rendering of Dataset\/DatasetDict.from_csv args.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2066\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2066\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2065","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2065\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2065\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2065\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2065","id":833291432,"node_id":"MDU6SXNzdWU4MzMyOTE0MzI=","number":2065,"title":"Only user permission of saved cache files, not group","user":{"login":"lorr1","id":57237365,"node_id":"MDQ6VXNlcjU3MjM3MzY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57237365?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lorr1","html_url":"https:\/\/github.com\/lorr1","followers_url":"https:\/\/api.github.com\/users\/lorr1\/followers","following_url":"https:\/\/api.github.com\/users\/lorr1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lorr1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lorr1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lorr1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lorr1\/orgs","repos_url":"https:\/\/api.github.com\/users\/lorr1\/repos","events_url":"https:\/\/api.github.com\/users\/lorr1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lorr1\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":19,"created_at":"2021-03-17T00:20:22Z","updated_at":"2021-05-10T06:45:29Z","closed_at":"2021-05-10T06:45:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello,\r\n\r\nIt seems when a cached file is saved from calling `dataset.map` for preprocessing, it gets the user permissions and none of the user's group permissions. As we share data files across members of our team, this is causing a bit of an issue as we have to continually reset the permission of the files. 
Do you know any ways around this or a way to correctly set the permissions?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2065\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2065\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2064","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2064\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2064\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2064\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2064","id":833002360,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk0MDczOTQ1","number":2064,"title":"Fix ted_talks_iwslt version error","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-16T16:43:45Z","updated_at":"2021-03-16T18:00:08Z","closed_at":"2021-03-16T18:00:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2064","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2064","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2064.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2064.patch","merged_at":"2021-03-16T18:00:07Z"},"body":"This PR fixes the bug where the version argument would be passed twice if the dataset configuration was created on the fly.\r\n\r\nFixes #2059 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2064\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2064\/timeline","performed_via_github_app":null} 
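A possible workaround sketch for the cache-file permission report above (issue 2065), assuming a POSIX filesystem and the default cache location: relax the process umask before running the preprocessing so new Arrow cache files keep group read/write, or retrofit group permissions on an existing cache directory with the standard library. The umask value and paths below are assumptions for illustration, not behaviour guaranteed by the library.

```python
# Hedged workaround sketch for the cache-permission report above (issue 2065).
# Relax the umask so newly written cache files keep group rw, or retrofit
# group permissions on an existing cache tree. Paths and the umask value are
# assumptions, not library-documented behaviour.
import os
import stat
from pathlib import Path

os.umask(0o002)  # files created after this point keep group read/write

# ... run load_dataset / .map here; new cache files inherit the umask ...

cache_root = Path(os.environ.get(
    "HF_DATASETS_CACHE",
    Path.home() / ".cache" / "huggingface" / "datasets",
))
for p in cache_root.rglob("*"):
    extra = stat.S_IRGRP | stat.S_IWGRP
    if p.is_dir():
        extra |= stat.S_IXGRP  # directories also need group execute to be traversable
    p.chmod(p.stat().st_mode | extra)
```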
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2063","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2063\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2063\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2063\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2063","id":832993705,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk0MDY2NzI5","number":2063,"title":"[Common Voice] Adapt dataset script so that no manual data download is actually needed","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-16T16:33:44Z","updated_at":"2021-03-17T09:42:52Z","closed_at":"2021-03-17T09:42:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2063","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2063","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2063.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2063.patch","merged_at":"2021-03-17T09:42:37Z"},"body":"This PR changes the dataset script so that no manual data dir is needed anymore. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2063\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2063\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2062","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2062\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2062\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2062\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2062","id":832625483,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkzNzUyNTMz","number":2062,"title":"docs: fix missing quotation","user":{"login":"neal2018","id":46561493,"node_id":"MDQ6VXNlcjQ2NTYxNDkz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46561493?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/neal2018","html_url":"https:\/\/github.com\/neal2018","followers_url":"https:\/\/api.github.com\/users\/neal2018\/followers","following_url":"https:\/\/api.github.com\/users\/neal2018\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/neal2018\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/neal2018\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/neal2018\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/neal2018\/orgs","repos_url":"https:\/\/api.github.com\/users\/neal2018\/repos","events_url":"https:\/\/api.github.com\/users\/neal2018\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/neal2018\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-16T10:07:54Z","updated_at":"2021-03-17T09:21:57Z","closed_at":"2021-03-17T09:21:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2062","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2062","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2062.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2062.patch","merged_at":"2021-03-17T09:21:56Z"},"body":"The json code misses a quote","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2062\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2062\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2061","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2061\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2061\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2061\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2061","id":832596228,"node_id":"MDU6SXNzdWU4MzI1OTYyMjg=","number":2061,"title":"Cannot load udpos subsets from xtreme dataset 
using load_dataset()","user":{"login":"adzcodez","id":55791365,"node_id":"MDQ6VXNlcjU1NzkxMzY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55791365?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adzcodez","html_url":"https:\/\/github.com\/adzcodez","followers_url":"https:\/\/api.github.com\/users\/adzcodez\/followers","following_url":"https:\/\/api.github.com\/users\/adzcodez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adzcodez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adzcodez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adzcodez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adzcodez\/orgs","repos_url":"https:\/\/api.github.com\/users\/adzcodez\/repos","events_url":"https:\/\/api.github.com\/users\/adzcodez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adzcodez\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-16T09:32:13Z","updated_at":"2021-06-18T11:54:11Z","closed_at":"2021-06-18T11:54:10Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello, \r\n\r\nI am trying to load the udpos English subset from xtreme dataset, but this faces an error during loading. I am using datasets v1.4.1, pip install. I have tried with other udpos languages which also fail, though loading a different subset altogether (such as XNLI) has no issue. I have also tried on Colab and faced the same error. \r\n\r\nReprex is: \r\n\r\n`from datasets import load_dataset `\r\n`dataset = load_dataset('xtreme', 'udpos.English')`\r\n\r\nThe error is: \r\n`KeyError: '_'`\r\n\r\nThe full traceback is: \r\nKeyError Traceback (most recent call last)\r\n in \r\n 1 from datasets import load_dataset\r\n----> 2 dataset = load_dataset('xtreme', 'udpos.English')\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, **config_kwargs)\r\n 738 \r\n 739 # Download and prepare data\r\n--> 740 builder_instance.download_and_prepare(\r\n 741 download_config=download_config,\r\n 742 download_mode=download_mode,\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n 576 logger.warning(\"HF google storage unreachable. 
Downloading and preparing it from source\")\r\n 577 if not downloaded_from_gcs:\r\n--> 578 self._download_and_prepare(\r\n 579 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 580 )\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 654 try:\r\n 655 # Prepare split will record examples associated to the split\r\n--> 656 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n 657 except OSError as e:\r\n 658 raise OSError(\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\builder.py in _prepare_split(self, split_generator)\r\n 977 generator, unit=\" examples\", total=split_info.num_examples, leave=False, disable=not_verbose\r\n 978 ):\r\n--> 979 example = self.info.features.encode_example(record)\r\n 980 writer.write(example)\r\n 981 finally:\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in encode_example(self, example)\r\n 946 def encode_example(self, example):\r\n 947 example = cast_to_python_objects(example)\r\n--> 948 return encode_nested_example(self, example)\r\n 949 \r\n 950 def encode_batch(self, batch):\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in encode_nested_example(schema, obj)\r\n 840 # Nested structures: we allow dict, list\/tuples, sequences\r\n 841 if isinstance(schema, dict):\r\n--> 842 return {\r\n 843 k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n 844 }\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in (.0)\r\n 841 if isinstance(schema, dict):\r\n 842 return {\r\n--> 843 k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n 844 }\r\n 845 elif isinstance(schema, (list, tuple)):\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in encode_nested_example(schema, obj)\r\n 868 # ClassLabel will convert from string to int, TranslationVariableLanguages does some checks\r\n 869 elif isinstance(schema, (ClassLabel, TranslationVariableLanguages, Value, _ArrayXD)):\r\n--> 870 return schema.encode_example(obj)\r\n 871 # Other object should be directly convertible to a native Arrow type (like Translation and Translation)\r\n 872 return obj\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in encode_example(self, example_data)\r\n 647 # If a string is given, convert to associated integer\r\n 648 if isinstance(example_data, str):\r\n--> 649 example_data = self.str2int(example_data)\r\n 650 \r\n 651 # Allowing -1 to mean no label.\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in str2int(self, values)\r\n 605 if value not in self._str2int:\r\n 606 value = value.strip()\r\n--> 607 output.append(self._str2int[str(value)])\r\n 608 else:\r\n 609 # No names provided, try to integerize\r\n\r\nKeyError: '_'\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2061\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2061\/timeline","performed_via_github_app":null} 
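The traceback above (issue 2061) bottoms out in `ClassLabel.str2int`, which only encodes strings that appear in the feature's `names`. A minimal, self-contained illustration of that failure mode follows; the label set is invented for the example and is not the actual xtreme/udpos schema, and the exception type is hedged because older releases raise `KeyError` while newer ones raise `ValueError`.

```python
# Minimal illustration of the failure mode in the traceback above (issue 2061):
# ClassLabel only encodes tags registered in `names`, so an unseen tag such as
# "_" fails in str2int. The label set below is made up for the example and is
# NOT the actual xtreme/udpos feature definition.
from datasets import ClassLabel

pos_tags = ClassLabel(names=["NOUN", "VERB", "ADJ", "PUNCT"])

print(pos_tags.str2int("NOUN"))  # -> 0

try:
    pos_tags.str2int("_")
except (KeyError, ValueError) as err:  # older releases raise KeyError, newer ones ValueError
    print("unknown tag:", err)
```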
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2060","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2060\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2060\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2060\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2060","id":832588591,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkzNzIxNzcx","number":2060,"title":"Filtering refactor","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"assignees":[{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":10,"
created_at":"2021-03-16T09:23:30Z","updated_at":"2021-10-13T09:09:04Z","closed_at":"2021-10-13T09:09:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2060","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2060","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2060.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2060.patch","merged_at":null},"body":"fix https:\/\/github.com\/huggingface\/datasets\/issues\/2032\r\n\r\nbenchmarking is somewhat inconclusive, currently running on `book_corpus` with:\r\n\r\n```python\r\n bc = load_dataset(\"bookcorpus\")\r\n now = time.time()\r\n bc.filter(lambda x: len(x[\"text\"]) < 64)\r\n elapsed = time.time() - now\r\n print(elapsed)\r\n```\r\n\r\nthis branch does it in 233 seconds, master in 1409 seconds.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2060\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2060\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2059","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2059\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2059\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2059\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2059","id":832579156,"node_id":"MDU6SXNzdWU4MzI1NzkxNTY=","number":2059,"title":"Error while following docs to load the `ted_talks_iwslt` dataset","user":{"login":"ekdnam","id":40426312,"node_id":"MDQ6VXNlcjQwNDI2MzEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40426312?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ekdnam","html_url":"https:\/\/github.com\/ekdnam","followers_url":"https:\/\/api.github.com\/users\/ekdnam\/followers","following_url":"https:\/\/api.github.com\/users\/ekdnam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ekdnam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ekdnam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ekdnam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ekdnam\/orgs","repos_url":"https:\/\/api.github.com\/users\/ekdnam\/repos","events_url":"https:\/\/api.github.com\/users\/ekdnam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ekdnam\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-16T09:12:19Z","updated_at":"2021-03-16T18:00:31Z","closed_at":"2021-03-16T18:00:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am currently trying to load the `ted_talks_iwslt` dataset into google colab.\r\n\r\nThe 
[docs](https:\/\/huggingface.co\/datasets\/ted_talks_iwslt) mention the following way of doing so.\r\n\r\n```python\r\ndataset = load_dataset(\"ted_talks_iwslt\", language_pair=(\"it\", \"pl\"), year=\"2014\")\r\n```\r\n\r\nExecuting it results in the error attached below.\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in ()\r\n----> 1 dataset = load_dataset(\"ted_talks_iwslt\", language_pair=(\"it\", \"pl\"), year=\"2014\")\r\n\r\n4 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, **config_kwargs)\r\n 730 hash=hash,\r\n 731 features=features,\r\n--> 732 **config_kwargs,\r\n 733 )\r\n 734 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in __init__(self, writer_batch_size, *args, **kwargs)\r\n 927 \r\n 928 def __init__(self, *args, writer_batch_size=None, **kwargs):\r\n--> 929 super(GeneratorBasedBuilder, self).__init__(*args, **kwargs)\r\n 930 # Batch size used by the ArrowWriter\r\n 931 # It defines the number of samples that are kept in memory before writing them\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in __init__(self, cache_dir, name, hash, features, **config_kwargs)\r\n 241 name,\r\n 242 custom_features=features,\r\n--> 243 **config_kwargs,\r\n 244 )\r\n 245 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in _create_builder_config(self, name, custom_features, **config_kwargs)\r\n 337 if \"version\" not in config_kwargs and hasattr(self, \"VERSION\") and self.VERSION:\r\n 338 config_kwargs[\"version\"] = self.VERSION\r\n--> 339 builder_config = self.BUILDER_CONFIG_CLASS(**config_kwargs)\r\n 340 \r\n 341 # otherwise use the config_kwargs to overwrite the attributes\r\n\r\n\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/ted_talks_iwslt\/024d06b1376b361e59245c5878ab8acf9a7576d765f2d0077f61751158e60914\/ted_talks_iwslt.py in __init__(self, language_pair, year, **kwargs)\r\n 219 description=description,\r\n 220 version=datasets.Version(\"1.1.0\", \"\"),\r\n--> 221 **kwargs,\r\n 222 )\r\n 223 \r\n\r\nTypeError: __init__() got multiple values for keyword argument 'version'\r\n```\r\n\r\nHow to resolve this? 
\r\n\r\nPS: Thanks a lot @huggingface team for creating this great library!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2059\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2059\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2058","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2058\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2058\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2058\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2058","id":832159844,"node_id":"MDU6SXNzdWU4MzIxNTk4NDQ=","number":2058,"title":"Is it possible to convert a `tfds` to HuggingFace `dataset`?","user":{"login":"abarbosa94","id":6608232,"node_id":"MDQ6VXNlcjY2MDgyMzI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6608232?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abarbosa94","html_url":"https:\/\/github.com\/abarbosa94","followers_url":"https:\/\/api.github.com\/users\/abarbosa94\/followers","following_url":"https:\/\/api.github.com\/users\/abarbosa94\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abarbosa94\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abarbosa94\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abarbosa94\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abarbosa94\/orgs","repos_url":"https:\/\/api.github.com\/users\/abarbosa94\/repos","events_url":"https:\/\/api.github.com\/users\/abarbosa94\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abarbosa94\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-15T20:18:47Z","updated_at":"2021-03-15T20:18:47Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I was having some weird bugs with `C4`dataset version of HuggingFace, so I decided to try to download `C4`from `tfds`. 
I would like to know if it is possible to convert a tfds dataset to HuggingFace dataset format :)\r\n\r\nI can also open a new issue reporting the bug I'm receiving with `datasets.load_dataset('c4','en')` in the future if you think that it would be useful.\r\n\r\nThanks!\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2058\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2058\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2057","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2057\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2057\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2057\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2057","id":832120522,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkzMzMzMjM0","number":2057,"title":"update link to ZEST dataset","user":{"login":"matt-peters","id":619844,"node_id":"MDQ6VXNlcjYxOTg0NA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/619844?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/matt-peters","html_url":"https:\/\/github.com\/matt-peters","followers_url":"https:\/\/api.github.com\/users\/matt-peters\/followers","following_url":"https:\/\/api.github.com\/users\/matt-peters\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/matt-peters\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/matt-peters\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/matt-peters\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/matt-peters\/orgs","repos_url":"https:\/\/api.github.com\/users\/matt-peters\/repos","events_url":"https:\/\/api.github.com\/users\/matt-peters\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/matt-peters\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-15T19:22:57Z","updated_at":"2021-03-16T17:06:28Z","closed_at":"2021-03-16T17:06:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2057","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2057","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2057.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2057.patch","merged_at":"2021-03-16T17:06:28Z"},"body":"Updating the link as the original one is no longer working. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2057\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2057\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2056","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2056\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2056\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2056\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2056","id":831718397,"node_id":"MDU6SXNzdWU4MzE3MTgzOTc=","number":2056,"title":"issue with opus100\/en-fr dataset ","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-15T11:32:42Z","updated_at":"2021-03-16T15:49:00Z","closed_at":"2021-03-16T15:48:59Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am running run_mlm.py code of huggingface repo with opus100\/fr-en pair, I am getting this error, note that this error occurs for only this pairs and not the other pairs. Any idea why this is occurring? and how I can solve this? 
\r\n\r\nThanks a lot @lhoestq for your help in advance.\r\n\r\n`\r\nthread '' panicked at 'index out of bounds: the len is 617 but the index is 617', \/__w\/tokenizers\/tokenizers\/tokenizers\/src\/tokenizer\/normalizer.rs:382:21\r\nnote: run with `RUST_BACKTRACE=1` environment variable to display a backtrace\r\n 63%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 626\/1000 [00:27<00:16, 22.69ba\/s]\r\n\r\nTraceback (most recent call last):\r\n File \"run_mlm.py\", line 550, in \r\n main()\r\n File \"run_mlm.py\", line 412, in main\r\n in zip(data_args.dataset_name, data_args.dataset_config_name)]\r\n File \"run_mlm.py\", line 411, in \r\n logger) for dataset_name, dataset_config_name\\\r\n File \"\/user\/dara\/dev\/codes\/seq2seq\/data\/tokenize_datasets.py\", line 96, in get_tokenized_dataset\r\n load_from_cache_file=not data_args.overwrite_cache,\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py\", line 448, in map\r\n for k, dataset in self.items()\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py\", line 448, in \r\n for k, dataset in self.items()\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1309, in map\r\n update_data=update_data,\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 204, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 337, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1574, in _map_single\r\n batch, indices, check_same_num_examples=len(self.list_indexes()) > 0, offset=offset\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1490, in apply_function_on_filtered_inputs\r\n function(*fn_args, effective_indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n File \"\/user\/dara\/dev\/codes\/seq2seq\/data\/tokenize_datasets.py\", line 89, in tokenize_function\r\n return tokenizer(examples[text_column_name], return_special_tokens_mask=True)\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 2347, in __call__\r\n **kwargs,\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 2532, in batch_encode_plus\r\n **kwargs,\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_fast.py\", line 384, in _batch_encode_plus\r\n is_pretokenized=is_split_into_words,\r\npyo3_runtime.PanicException: index out of bounds: the len is 617 but the index is 
617\r\n\r\n`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2056\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2056\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2055","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2055\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2055\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2055\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2055","id":831684312,"node_id":"MDU6SXNzdWU4MzE2ODQzMTI=","number":2055,"title":"is there a way to override a dataset object saved with save_to_disk?","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-15T10:50:53Z","updated_at":"2021-03-22T04:06:17Z","closed_at":"2021-03-22T04:06:17Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"At the moment when I use save_to_disk, it uses the arbitrary name for the arrow file. Is there a way to override such an object? 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2055\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2055\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2054","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2054\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2054\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2054\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2054","id":831597665,"node_id":"MDU6SXNzdWU4MzE1OTc2NjU=","number":2054,"title":"Could not find file for ZEST dataset","user":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-15T09:11:58Z","updated_at":"2021-05-03T09:30:24Z","closed_at":"2021-05-03T09:30:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to use zest dataset from Allen AI using below code in colab,\r\n```\r\n!pip install -q datasets\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"zest\")\r\n```\r\n\r\nI am getting the following error,\r\n```\r\nUsing custom data configuration default\r\n\r\nDownloading and preparing dataset zest\/default (download: 5.53 MiB, generated: 19.96 MiB, post-processed: Unknown size, total: 25.48 MiB) to \/root\/.cache\/huggingface\/datasets\/zest\/default\/0.0.0\/1f7a230fbfc964d979bbca0f0130fbab3259fce547ee758ad8aa4f9c9bec6cca...\r\n---------------------------------------------------------------------------\r\nFileNotFoundError Traceback (most recent call last)\r\n in ()\r\n 1 from datasets import load_dataset\r\n 2 \r\n----> 3 dataset = load_dataset(\"zest\")\r\n\r\n9 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, 
resume_download, user_agent, local_files_only, use_etag, max_retries, use_auth_token)\r\n 612 )\r\n 613 elif response is not None and response.status_code == 404:\r\n--> 614 raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\n 615 _raise_if_offline_mode_is_enabled(f\"Tried to reach {url}\")\r\n 616 raise ConnectionError(\"Couldn't reach {}\".format(url))\r\n\r\nFileNotFoundError: Couldn't find file at https:\/\/ai2-datasets.s3-us-west-2.amazonaws.com\/zest\/zest.zip\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2054\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2054\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2053","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2053\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2053\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2053\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2053","id":831151728,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkyNTM4ODY2","number":2053,"title":"Add bAbI QA tasks","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-14T13:04:39Z","updated_at":"2021-03-29T12:41:48Z","closed_at":"2021-03-29T12:41:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2053","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2053","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2053.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2053.patch","merged_at":"2021-03-29T12:41:48Z"},"body":"- **Name:** *The (20) QA bAbI tasks*\r\n- **Description:** *The (20) QA bAbI tasks are a set of proxy tasks that evaluate reading comprehension via question answering. Our tasks measure understanding in several ways: whether a system is able to answer questions via chaining facts, simple induction, deduction and many more. The tasks are designed to be prerequisites for any system that aims to be capable of conversing with a human. 
The aim is to classify these tasks into skill sets,so that researchers can identify (and then rectify) the failings of their systems.*\r\n- **Paper:** [arXiv](https:\/\/arxiv.org\/pdf\/1502.05698.pdf)\r\n- **Data:** [Facebook Research Page](https:\/\/research.fb.com\/downloads\/babi\/)\r\n- **Motivation:** This is a unique dataset with story-based Question Answering. It is a part of the `bAbI` project by Facebook Research.\r\n\r\n**Note**: I have currently added all the 160 configs. If this seems impractical, I can keep only a few. While each `dummy_data.zip` weighs a few KBs, overall it is around 1.3MB for all configurations. This is problematic. Let me know what is to be done.\r\n\r\nThanks :)\r\n\r\n\r\n### Checkbox\r\n\r\n- [x] Create the dataset script `\/datasets\/my_dataset\/my_dataset.py` using the template\r\n- [x] Fill the `_DESCRIPTION` and `_CITATION` variables\r\n- [x] Implement `_infos()`, `_split_generators()` and `_generate_examples()`\r\n- [x] Make sure that the `BUILDER_CONFIGS` class attribute is filled with the different configurations of the dataset and that the `BUILDER_CONFIG_CLASS` is specified if there is a custom config class.\r\n- [x] Generate the metadata file `dataset_infos.json` for all configurations\r\n- [x] Generate the dummy data `dummy_data.zip` files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [x] Add the dataset card `README.md` using the template : fill the tags and the various paragraphs\r\n- [x] Both tests for the real data and the dummy data pass.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2053\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2053\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2052","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2052\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2052\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2052\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2052","id":831135704,"node_id":"MDU6SXNzdWU4MzExMzU3MDQ=","number":2052,"title":"Timit_asr dataset repeats 
examples","user":{"login":"fermaat","id":7583522,"node_id":"MDQ6VXNlcjc1ODM1MjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7583522?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/fermaat","html_url":"https:\/\/github.com\/fermaat","followers_url":"https:\/\/api.github.com\/users\/fermaat\/followers","following_url":"https:\/\/api.github.com\/users\/fermaat\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/fermaat\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/fermaat\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/fermaat\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/fermaat\/orgs","repos_url":"https:\/\/api.github.com\/users\/fermaat\/repos","events_url":"https:\/\/api.github.com\/users\/fermaat\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/fermaat\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-14T11:43:43Z","updated_at":"2021-03-15T10:37:16Z","closed_at":"2021-03-15T10:37:16Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Summary\r\n\r\nWhen loading timit_asr dataset on datasets 1.4+, every row in the dataset is the same\r\nSteps to reproduce\r\n\r\nAs an example, on this code there is the text from the training part:\r\n\r\nCode snippet:\r\n```\r\nfrom datasets import load_dataset, load_metric\r\n\r\ntimit = load_dataset(\"timit_asr\")\r\ntimit['train']['text']\r\n#['Would such an act of refusal be useful?',\r\n# 'Would such an act of refusal be useful?',\r\n# 'Would such an act of refusal be useful?',\r\n# 'Would such an act of refusal be useful?',\r\n# 'Would such an act of refusal be useful?',\r\n# 'Would such an act of refusal be useful?',\r\n```\r\nThe same behavior happens for other columns\r\n\r\nExpected behavior:\r\n\r\nDifferent info on the actual timit_asr dataset\r\n\r\nActual behavior:\r\n\r\nWhen loading timit_asr dataset on datasets 1.4+, every row in the dataset is the same. I've checked datasets 1.3 and the rows are different\r\nDebug info\r\n\r\n Streamlit version: (get it with $ streamlit version)\r\n Python version: Python 3.6.12\r\n Using Conda? PipEnv? PyEnv? Pex? 
Using pip\r\n OS version: Centos-release-7-9.2009.1.el7.centos.x86_64\r\n\r\nAdditional information\r\n\r\nYou can check the same behavior on https:\/\/huggingface.co\/datasets\/viewer\/?dataset=timit_asr","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2052\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2052\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2051","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2051\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2051\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2051\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2051","id":831027021,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkyNDQ2MDU1","number":2051,"title":"Add MDD Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-14T00:01:05Z","updated_at":"2021-03-19T11:15:44Z","closed_at":"2021-03-19T10:31:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2051","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2051","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2051.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2051.patch","merged_at":"2021-03-19T10:31:59Z"},"body":"- **Name:** *MDD Dataset*\r\n- **Description:** The Movie Dialog dataset (MDD) is designed to measure how well models can perform at goal and non-goal orientated dialog centered around the topic of movies (question answering, recommendation and discussion), from various movie reviews sources such as MovieLens and OMDb.\r\n- **Paper:** [arXiv](https:\/\/arxiv.org\/pdf\/1511.06931.pdf)\r\n- **Data:** https:\/\/research.fb.com\/downloads\/babi\/\r\n- **Motivation:** This is one of the popular dialog datasets, a part of Facebook Research's \"bAbI project\".\r\n\r\n### Checkbox\r\n\r\n- [x] Create the dataset script `\/datasets\/my_dataset\/my_dataset.py` using the template\r\n- [x] Fill the `_DESCRIPTION` and `_CITATION` variables\r\n- [x] Implement `_infos()`, 
`_split_generators()` and `_generate_examples()`\r\n- [x] Make sure that the `BUILDER_CONFIGS` class attribute is filled with the different configurations of the dataset and that the `BUILDER_CONFIG_CLASS` is specified if there is a custom config class.\r\n- [x] Generate the metadata file `dataset_infos.json` for all configurations\r\n- [x] Generate the dummy data `dummy_data.zip` files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [x] Add the dataset card `README.md` using the template : fill the tags and the various paragraphs\r\n- [x] Both tests for the real data and the dummy data pass.\r\n\r\n\r\n**Note**: I haven't included the following from the data files: `entities` (the file containing list of all entities in the first three subtasks), `dictionary`(the dictionary of words they use in their models), `movie_kb`(contains the knowledge base of information about the movies, actors and other entities that are mentioned in the dialogs). Please let me know if those are needed, and if yes, should I make separate configurations for them?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2051\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2051\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2050","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2050\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2050\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2050\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2050","id":831006551,"node_id":"MDU6SXNzdWU4MzEwMDY1NTE=","number":2050,"title":"Build custom dataset to fine-tune Wav2Vec2","user":{"login":"Omarnabk","id":72882909,"node_id":"MDQ6VXNlcjcyODgyOTA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/72882909?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Omarnabk","html_url":"https:\/\/github.com\/Omarnabk","followers_url":"https:\/\/api.github.com\/users\/Omarnabk\/followers","following_url":"https:\/\/api.github.com\/users\/Omarnabk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Omarnabk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Omarnabk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Omarnabk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Omarnabk\/orgs","repos_url":"https:\/\/api.github.com\/users\/Omarnabk\/repos","events_url":"https:\/\/api.github.com\/users\/Omarnabk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Omarnabk\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-13T22:01:10Z","updated_at":"2021-03-15T09:27:28Z","closed_at":"2021-03-15T09:27:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Thank you for your recent tutorial on how to finetune Wav2Vec2 on a custom dataset. The example you gave here (https:\/\/huggingface.co\/blog\/fine-tune-xlsr-wav2vec2) was on the CommonVoice dataset. However, what if I want to load my own dataset? I have a manifest (transcript and their audio files) in a JSON file. \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2050\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2050\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2049","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2049\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2049\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2049\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2049","id":830978687,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkyNDE2MzQ0","number":2049,"title":"Fix text-classification tags","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-13T19:51:42Z","updated_at":"2021-03-16T15:47:46Z","closed_at":"2021-03-16T15:47:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2049","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2049","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2049.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2049.patch","merged_at":"2021-03-16T15:47:46Z"},"body":"There are different tags for text classification right now: `text-classification` and `text_classification`:\r\n![image](https:\/\/user-images.githubusercontent.com\/29076344\/111042457-856bdf00-8463-11eb-93c9-50a30106a1a1.png).\r\n\r\nThis PR fixes 
it.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2049\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2049\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2048","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2048\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2048\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2048\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2048","id":830953431,"node_id":"MDU6SXNzdWU4MzA5NTM0MzE=","number":2048,"title":"github is not always available - probably need a back up","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-13T18:03:32Z","updated_at":"2021-03-13T18:03:32Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Yesterday morning github wasn't working:\r\n\r\n```\r\n:\/tmp$ wget https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.4.1\/metrics\/sacrebleu\/sacrebleu.py--2021-03-12 18:35:59-- https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.4.1\/metrics\/sacrebleu\/sacrebleu.py\r\nResolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.111.133, 185.199.109.133, ...\r\nConnecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\r\nHTTP request sent, awaiting response... 500 Internal Server Error\r\n2021-03-12 18:36:11 ERROR 500: Internal Server Error.\r\n```\r\n\r\nSuggestion: have a failover system and replicate the data on another system and reach there if gh isn't reachable? 
perhaps gh can be a master and the replicate a slave - so there is only one true source.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2048\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2048\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2047","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2047\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2047\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2047\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2047","id":830626430,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkyMTI2NzQ3","number":2047,"title":"Multilingual dIalogAct benchMark (miam)","user":{"login":"eusip","id":1551356,"node_id":"MDQ6VXNlcjE1NTEzNTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1551356?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eusip","html_url":"https:\/\/github.com\/eusip","followers_url":"https:\/\/api.github.com\/users\/eusip\/followers","following_url":"https:\/\/api.github.com\/users\/eusip\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eusip\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eusip\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eusip\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eusip\/orgs","repos_url":"https:\/\/api.github.com\/users\/eusip\/repos","events_url":"https:\/\/api.github.com\/users\/eusip\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eusip\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-12T23:02:55Z","updated_at":"2021-03-23T10:36:34Z","closed_at":"2021-03-19T10:47:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2047","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2047","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2047.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2047.patch","merged_at":"2021-03-19T10:47:13Z"},"body":"My collaborators (@EmileChapuis, @PierreColombo) and I within the Affective Computing team at Telecom Paris would like to anonymously publish the miam dataset. It is assocated with a publication currently under review. 
We will update the dataset with full citations once the review period is over.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2047\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2047\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2046","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2046\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2046\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2046\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2046","id":830423033,"node_id":"MDU6SXNzdWU4MzA0MjMwMzM=","number":2046,"title":"add_faisis_index gets very slow when doing it interatively ","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2021-03-12T20:27:18Z","updated_at":"2021-03-24T22:29:11Z","closed_at":"2021-03-24T22:29:11Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As the below code suggests, I want to run add_faisis_index in every nth interaction from the training loop. I have 7.2 million documents. Usually, it takes 2.5 hours (if I run an as a separate process similar to the script given in rag\/use_own_knowleldge_dataset.py). Now, this takes usually 5hrs. Is this normal? Any way to make this process faster? 
\r\n\r\n@lhoestq \r\n\r\n```\r\n def training_step(self, batch, batch_idx) -> Dict:\r\n\r\n \r\n if (not batch_idx==0) and (batch_idx%5==0):\r\n\r\n print(\"******************************************************\")\r\n ctx_encoder=self.trainer.model.module.module.model.rag.ctx_encoder\r\n model_copy =type(ctx_encoder)(self.config_dpr) # get a new instance #this will be load in the CPU\r\n model_copy.load_state_dict(ctx_encoder.state_dict()) # copy weights and stuff\r\n\r\n\r\n list_of_gpus = ['cuda:2','cuda:3']\r\n c_dir='\/custom\/cache\/dir'\r\n\r\n kb_dataset = load_dataset(\"csv\", data_files=[self.custom_config.csv_path], split=\"train\", delimiter=\"\\t\", column_names=[\"title\", \"text\"],cache_dir=c_dir) \r\n\r\n print(kb_dataset)\r\n\r\n \r\n n=len(list_of_gpus) #nunber of dedicated GPUs\r\n kb_list=[kb_dataset.shard(n, i, contiguous=True) for i in range(n)]\r\n\r\n #kb_dataset.save_to_disk('\/hpc\/gsir059\/MY-Test\/RAY\/transformers\/examples\/research_projects\/rag\/haha-dir')\r\n\r\n\r\n print(self.trainer.global_rank)\r\n dataset_shards = self.re_encode_kb(model_copy.to(device=list_of_gpus[self.trainer.global_rank]),kb_list[self.trainer.global_rank])\r\n output = [None for _ in list_of_gpus]\r\n\r\n #self.trainer.accelerator_connector.accelerator.barrier(\"embedding_process\")\r\n dist.all_gather_object(output, dataset_shards)\r\n \r\n\r\n #This creation and re-initlaization of the new index\r\n if (self.trainer.global_rank==0): #saving will be done in the main process \r\n \r\n combined_dataset = concatenate_datasets(output)\r\n \r\n passages_path =self.config.passages_path\r\n\r\n logger.info(\"saving the dataset with \")\r\n #combined_dataset.save_to_disk('\/hpc\/gsir059\/MY-Test\/RAY\/transformers\/examples\/research_projects\/rag\/MY-Passage')\r\n combined_dataset.save_to_disk(passages_path)\r\n logger.info(\"Add faiss index to the dataset that consist of embeddings\") \r\n\r\n \r\n embedding_dataset=combined_dataset\r\n index = faiss.IndexHNSWFlat(768, 128, faiss.METRIC_INNER_PRODUCT)\r\n embedding_dataset.add_faiss_index(\"embeddings\", custom_index=index)\r\n\r\n embedding_dataset.get_index(\"embeddings\").save(self.config.index_path)\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2046\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2046\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2045","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2045\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2045\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2045\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2045","id":830351527,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxODc2Mjcz","number":2045,"title":"Preserve column ordering in 
Dataset.rename_column","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-12T18:26:47Z","updated_at":"2021-03-16T14:48:05Z","closed_at":"2021-03-16T14:35:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2045","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2045","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2045.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2045.patch","merged_at":"2021-03-16T14:35:05Z"},"body":"Currently `Dataset.rename_column` doesn't necessarily preserve the order of the columns:\r\n```python\r\n>>> from datasets import Dataset\r\n>>> d = Dataset.from_dict({'sentences': [\"s1\", \"s2\"], 'label': [0, 1]})\r\n>>> d\r\nDataset({\r\n features: ['sentences', 'label'],\r\n num_rows: 2\r\n})\r\n>>> d.rename_column('sentences', 'text')\r\nDataset({\r\n features: ['label', 'text'],\r\n num_rows: 2\r\n})\r\n```\r\nThis PR fixes this.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2045\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2045\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2044","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2044\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2044\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2044\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2044","id":830339905,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxODY2NzM1","number":2044,"title":"Add CBT 
dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-12T18:04:19Z","updated_at":"2021-03-19T11:10:13Z","closed_at":"2021-03-19T10:29:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2044","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2044","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2044.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2044.patch","merged_at":"2021-03-19T10:29:15Z"},"body":"This PR adds the [CBT Dataset](https:\/\/arxiv.org\/abs\/1511.02301).\r\n\r\nNote that I have also added the `raw` dataset as a separate configuration. I couldn't find a suitable \"task\" for it in YAML tags.\r\n\r\nThe dummy files have one example each, as the examples are slightly big. 
For `raw` dataset, I just used top few lines, because they are entire books and would take up a lot of space.\r\n\r\nLet me know in case of any issues.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2044\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2044\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2043","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2043\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2043\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2043\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2043","id":830279098,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxODE1ODAz","number":2043,"title":"Support pickle protocol for dataset splits defined as ReadInstruction","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-12T16:35:11Z","updated_at":"2021-03-16T14:25:38Z","closed_at":"2021-03-16T14:05:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2043","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2043","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2043.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2043.patch","merged_at":"2021-03-16T14:05:05Z"},"body":"Fixes #2022 (+ some style fixes) ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2043\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2043\/timeline","performed_via_github_app":null} 
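For context on #2043 above: a minimal sketch, assuming the public `datasets.ReadInstruction` API, of the pickle round-trip that splits defined this way are expected to survive (e.g. for caching or multiprocessing); the actual fix lives in the library internals and may differ.

```python
import pickle

from datasets import ReadInstruction

# A split expressed as a ReadInstruction instead of a plain string spec.
instruction = ReadInstruction.from_spec("train[:10%]")

# PR #2043 is about objects built from such splits being picklable;
# the basic round-trip looks like this.
restored = pickle.loads(pickle.dumps(instruction))
print(restored)
```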
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2042","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2042\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2042\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2042\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2042","id":830190276,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxNzQwNzQ3","number":2042,"title":"Fix arrow memory checks issue in tests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-12T14:49:52Z","updated_at":"2021-03-12T15:04:23Z","closed_at":"2021-03-12T15:04:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2042","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2042","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2042.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2042.patch","merged_at":"2021-03-12T15:04:22Z"},"body":"The tests currently fail on `master` because the arrow memory verification doesn't return the expected memory evolution when loading an arrow table in memory.\r\nFrom my experiments, the tests fail only when the full test suite is ran.\r\nThis made me think that maybe some arrow objects from other tests were not freeing their memory until they do and cause the memory verifications to fail in other tests.\r\n\r\nCollecting the garbage collector before checking the arrow memory usage seems to fix this issue.\r\nI added a context manager `assert_arrow_memory_increases` that we can use in tests and that deals with the gc.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2042\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2042\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2041","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2041\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2041\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2041\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2041","id":830180803,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxNzMyNzMw","number":2041,"title":"Doc2dial update data_infos and data_loaders","user":{"login":"songfeng","id":2062185,"node_id":"MDQ6VXNlcjIwNjIxODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2062185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/songfeng","html_url":"https:\/\/github.com\/songfeng","followers_url":"https:\/\/api.github.com\/users\/songfeng\/followers","following_url":"https:\/\/api.github.com\/users\/songfeng\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/songfeng\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/songfeng\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/songfeng\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/songfeng\/orgs","repos_url":"https:\/\/api.github.com\/users\/songfeng\/repos","events_url":"https:\/\/api.github.com\/users\/songfeng\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/songfeng\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-12T14:39:29Z","updated_at":"2021-03-16T11:09:20Z","closed_at":"2021-03-16T11:09:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2041","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2041","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2041.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2041.patch","merged_at":"2021-03-16T11:09:20Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2041\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2041\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2040","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2040\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2040\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2040\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2040","id":830169387,"node_id":"MDU6SXNzdWU4MzAxNjkzODc=","number":2040,"title":"ValueError: datasets' indices [1] come from memory and datasets' indices [0] come from 
disk","user":{"login":"simonschoe","id":53626067,"node_id":"MDQ6VXNlcjUzNjI2MDY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53626067?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/simonschoe","html_url":"https:\/\/github.com\/simonschoe","followers_url":"https:\/\/api.github.com\/users\/simonschoe\/followers","following_url":"https:\/\/api.github.com\/users\/simonschoe\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/simonschoe\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/simonschoe\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/simonschoe\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/simonschoe\/orgs","repos_url":"https:\/\/api.github.com\/users\/simonschoe\/repos","events_url":"https:\/\/api.github.com\/users\/simonschoe\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/simonschoe\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-12T14:27:00Z","updated_at":"2021-08-04T18:00:43Z","closed_at":"2021-08-04T18:00:43Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi there,\r\n\r\nI am trying to concat two datasets that I've previously saved to disk via `save_to_disk()` like so (note that both are saved as `DataDict`, `PATH_DATA_CLS_*` are `Path`-objects):\r\n```python\r\nconcatenate_datasets([load_from_disk(PATH_DATA_CLS_A)['train'], load_from_disk(PATH_DATA_CLS_B)['train']])\r\n```\r\nYielding the following error:\r\n```python\r\nValueError: Datasets' indices should ALL come from memory, or should ALL come from disk.\r\nHowever datasets' indices [1] come from memory and datasets' indices [0] come from disk.\r\n```\r\nBeen trying to solve this for quite some time now. Both `DataDict` have been created by reading in a `csv` via `load_dataset` and subsequently processed using the various `datasets` methods (i.e. filter, map, remove col, rename col). 
Can't figure out tho...\r\n\r\n`load_from_disk(PATH_DATA_CLS_A)['train']` yields:\r\n```python\r\nDataset({\r\n features: ['labels', 'text'],\r\n num_rows: 785\r\n})\r\n```\r\n`load_from_disk(PATH_DATA_CLS_B)['train']` yields:\r\n```python\r\nDataset({\r\n features: ['labels', 'text'],\r\n num_rows: 3341\r\n})\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2040\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2040\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2039","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2039\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2039\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2039\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2039","id":830047652,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxNjE3ODY3","number":2039,"title":"Doc2dial rc","user":{"login":"songfeng","id":2062185,"node_id":"MDQ6VXNlcjIwNjIxODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2062185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/songfeng","html_url":"https:\/\/github.com\/songfeng","followers_url":"https:\/\/api.github.com\/users\/songfeng\/followers","following_url":"https:\/\/api.github.com\/users\/songfeng\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/songfeng\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/songfeng\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/songfeng\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/songfeng\/orgs","repos_url":"https:\/\/api.github.com\/users\/songfeng\/repos","events_url":"https:\/\/api.github.com\/users\/songfeng\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/songfeng\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-12T11:56:28Z","updated_at":"2021-03-12T15:32:36Z","closed_at":"2021-03-12T15:32:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2039","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2039","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2039.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2039.patch","merged_at":null},"body":"Added fix to handle the last turn that is a user turn.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2039\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2039\/timeline","performed_via_github_app":null} 
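On #2040 above: a commonly suggested workaround for the mixed in-memory/on-disk indices error is to materialize each dataset's indices mapping before concatenating. A sketch, assuming `Dataset.flatten_indices()` is available in the installed version and using placeholder paths in place of the issue's `PATH_DATA_CLS_*` objects:

```python
from pathlib import Path

from datasets import concatenate_datasets, load_from_disk

# Placeholder paths standing in for the issue's PATH_DATA_CLS_* objects.
PATH_DATA_CLS_A = Path("data/cls_a")
PATH_DATA_CLS_B = Path("data/cls_b")

ds_a = load_from_disk(str(PATH_DATA_CLS_A))["train"]
ds_b = load_from_disk(str(PATH_DATA_CLS_B))["train"]

# filter/map/select can leave an in-memory indices mapping behind; flattening
# writes that selection out so both datasets look alike to concatenate_datasets.
combined = concatenate_datasets([ds_a.flatten_indices(), ds_b.flatten_indices()])
```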
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2038","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2038\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2038\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2038\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2038","id":830036875,"node_id":"MDU6SXNzdWU4MzAwMzY4NzU=","number":2038,"title":"outdated dataset_infos.json might fail verifications","user":{"login":"songfeng","id":2062185,"node_id":"MDQ6VXNlcjIwNjIxODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2062185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/songfeng","html_url":"https:\/\/github.com\/songfeng","followers_url":"https:\/\/api.github.com\/users\/songfeng\/followers","following_url":"https:\/\/api.github.com\/users\/songfeng\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/songfeng\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/songfeng\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/songfeng\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/songfeng\/orgs","repos_url":"https:\/\/api.github.com\/users\/songfeng\/repos","events_url":"https:\/\/api.github.com\/users\/songfeng\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/songfeng\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-12T11:41:54Z","updated_at":"2021-03-16T16:27:40Z","closed_at":"2021-03-16T16:27:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The [doc2dial\/dataset_infos.json](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/doc2dial\/dataset_infos.json) is outdated. 
It would fail data_loader when verifying download checksum etc..\r\n\r\nCould you please update this file or point me how to update this file?\r\n\r\nThank you.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2038\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2038\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2037","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2037\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2037\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2037\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2037","id":829919685,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxNTA4MTQz","number":2037,"title":"Fix: Wikipedia - save memory by replacing root.clear with elem.clear","user":{"login":"miyamonz","id":6331508,"node_id":"MDQ6VXNlcjYzMzE1MDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6331508?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/miyamonz","html_url":"https:\/\/github.com\/miyamonz","followers_url":"https:\/\/api.github.com\/users\/miyamonz\/followers","following_url":"https:\/\/api.github.com\/users\/miyamonz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/miyamonz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/miyamonz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/miyamonz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/miyamonz\/orgs","repos_url":"https:\/\/api.github.com\/users\/miyamonz\/repos","events_url":"https:\/\/api.github.com\/users\/miyamonz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/miyamonz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-12T09:22:00Z","updated_at":"2021-03-23T06:08:16Z","closed_at":"2021-03-16T11:01:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2037","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2037","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2037.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2037.patch","merged_at":"2021-03-16T11:01:22Z"},"body":"see: https:\/\/github.com\/huggingface\/datasets\/issues\/2031\r\n\r\nWhat I did:\r\n- replace root.clear with elem.clear\r\n- remove lines to get root element\r\n- $ make style\r\n- $ make test\r\n - some tests required some pip packages, I installed them.\r\n\r\ntest results on origin\/master and my branch are same. 
I think it's not related on my modification, isn't it?\r\n```\r\n==================================================================================== short test summary info ====================================================================================\r\nFAILED tests\/test_arrow_writer.py::TypedSequenceTest::test_catch_overflow - AssertionError: OverflowError not raised\r\n============================================================= 1 failed, 2332 passed, 5138 skipped, 70 warnings in 91.75s (0:01:31) ==============================================================\r\nmake: *** [Makefile:19: test] Error 1\r\n\r\n```\r\n\r\nIs there anything else I should do?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2037\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2037\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2036","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2036\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2036\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2036\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2036","id":829909258,"node_id":"MDU6SXNzdWU4Mjk5MDkyNTg=","number":2036,"title":"Cannot load wikitext","user":{"login":"Gpwner","id":19349207,"node_id":"MDQ6VXNlcjE5MzQ5MjA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19349207?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Gpwner","html_url":"https:\/\/github.com\/Gpwner","followers_url":"https:\/\/api.github.com\/users\/Gpwner\/followers","following_url":"https:\/\/api.github.com\/users\/Gpwner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Gpwner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Gpwner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Gpwner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Gpwner\/orgs","repos_url":"https:\/\/api.github.com\/users\/Gpwner\/repos","events_url":"https:\/\/api.github.com\/users\/Gpwner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Gpwner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-12T09:09:39Z","updated_at":"2021-03-15T08:45:02Z","closed_at":"2021-03-15T08:44:44Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"when I execute these codes\r\n```\r\n>>> from datasets import load_dataset\r\n>>> test_dataset = load_dataset(\"wikitext\")\r\n```\r\n\r\nI got an error,any help?\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/xxx\/anaconda3\/envs\/transformer\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 589, in load_dataset\r\n path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n File \"\/home\/xxx\/anaconda3\/envs\/transformer\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 267, in prepare_module\r\n local_path = cached_path(file_path, 
download_config=download_config)\r\n File \"\/home\/xxx\/anaconda3\/envs\/transformer\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/home\/xxx\/anaconda3\/envs\/transformer\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 487, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/wikitext\/wikitext.py\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2036\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2036\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2035","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2035\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2035\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2035\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2035","id":829475544,"node_id":"MDU6SXNzdWU4Mjk0NzU1NDQ=","number":2035,"title":"wiki40b\/wikipedia for almost all languages cannot be downloaded","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-03-11T19:54:54Z","updated_at":"2021-03-16T14:53:37Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am trying to download the data as below:\r\n\r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"wiki40b\", \"cs\")\r\nprint(dataset)\r\n```\r\n\r\nI am getting this error. @lhoestq I will be grateful if you could assist me with this error. For almost all languages except english I am getting this error.\r\n\r\nI really need majority of languages in this dataset to be able to train my models for a deadline and your great scalable super well-written library is my only hope to train the models at scale while being low on resources. 
\r\n\r\nthank you very much.\r\n\r\n```\r\n(fast) dara@vgne046:\/user\/dara\/dev\/codes\/seq2seq$ python test_data.py\r\nDownloading and preparing dataset wiki40b\/cs (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to temp\/dara\/cache_home_2\/datasets\/wiki40b\/cs\/1.1.0\/063778187363ffb294896eaa010fc254b42b73e31117c71573a953b0b0bf010f...\r\nTraceback (most recent call last):\r\n File \"test_data.py\", line 3, in \r\n dataset = load_dataset(\"wiki40b\", \"cs\")\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 746, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 579, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 1105, in _download_and_prepare\r\n import apache_beam as beam\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/apache_beam-2.28.0-py3.7-linux-x86_64.egg\/apache_beam\/__init__.py\", line 96, in \r\n from apache_beam import io\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/apache_beam-2.28.0-py3.7-linux-x86_64.egg\/apache_beam\/io\/__init__.py\", line 23, in \r\n from apache_beam.io.avroio import *\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/apache_beam-2.28.0-py3.7-linux-x86_64.egg\/apache_beam\/io\/avroio.py\", line 55, in \r\n import avro\r\n File \"\", line 983, in _find_and_load\r\n File \"\", line 967, in _find_and_load_unlocked\r\n File \"\", line 668, in _load_unlocked\r\n File \"\", line 638, in _load_backward_compatible\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/avro_python3-1.9.2.1-py3.7.egg\/avro\/__init__.py\", line 34, in \r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/avro_python3-1.9.2.1-py3.7.egg\/avro\/__init__.py\", line 30, in LoadResource\r\nNotADirectoryError: [Errno 20] Not a directory: '\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/avro_python3-1.9.2.1-py3.7.egg\/avro\/VERSION.txt'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2035\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2035\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2034","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2034\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2034\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2034\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2034","id":829381388,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxMDU2MTEw","number":2034,"title":"Fix 
typo","user":{"login":"pcyin","id":3413464,"node_id":"MDQ6VXNlcjM0MTM0NjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3413464?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pcyin","html_url":"https:\/\/github.com\/pcyin","followers_url":"https:\/\/api.github.com\/users\/pcyin\/followers","following_url":"https:\/\/api.github.com\/users\/pcyin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pcyin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pcyin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pcyin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pcyin\/orgs","repos_url":"https:\/\/api.github.com\/users\/pcyin\/repos","events_url":"https:\/\/api.github.com\/users\/pcyin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pcyin\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-11T17:46:13Z","updated_at":"2021-03-11T18:06:25Z","closed_at":"2021-03-11T18:06:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2034","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2034","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2034.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2034.patch","merged_at":"2021-03-11T18:06:25Z"},"body":"Change `ENV_XDG_CACHE_HOME ` to `XDG_CACHE_HOME `","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2034\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2034\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2033","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2033\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2033\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2033\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2033","id":829295339,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkwOTgzMDAy","number":2033,"title":"Raise an error for outdated sacrebleu 
versions","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-11T16:08:00Z","updated_at":"2021-03-11T17:58:12Z","closed_at":"2021-03-11T17:58:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2033","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2033","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2033.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2033.patch","merged_at":"2021-03-11T17:58:12Z"},"body":"The `sacrebleu` metric seem to only work for sacrecleu>=1.4.12\r\n\r\nFor example using sacrebleu==1.2.10, an error is raised (from metric\/sacrebleu\/sacrebleu.py):\r\n```python\r\n def _compute(\r\n self,\r\n predictions,\r\n references,\r\n smooth_method=\"exp\",\r\n smooth_value=None,\r\n force=False,\r\n lowercase=False,\r\n tokenize=scb.DEFAULT_TOKENIZER,\r\n use_effective_order=False,\r\n ):\r\n references_per_prediction = len(references[0])\r\n if any(len(refs) != references_per_prediction for refs in references):\r\n raise ValueError(\"Sacrebleu requires the same number of references for each prediction\")\r\n transformed_references = [[refs[i] for refs in references] for i in range(references_per_prediction)]\r\n> output = scb.corpus_bleu(\r\n sys_stream=predictions,\r\n ref_streams=transformed_references,\r\n smooth_method=smooth_method,\r\n smooth_value=smooth_value,\r\n force=force,\r\n lowercase=lowercase,\r\n tokenize=tokenize,\r\n use_effective_order=use_effective_order,\r\n )\r\n\r\nE TypeError: corpus_bleu() got an unexpected keyword argument 'smooth_method'\r\n\/mnt\/cache\/modules\/datasets_modules\/metrics\/sacrebleu\/b390045b3d1dd4abf6a95c4a2a11ee3bcc2b7620b076204d0ddc353fa649fd86\/sacrebleu.py:114: TypeError\r\n```\r\n\r\nI improved the error message when users have an outdated version of sacrebleu.\r\nThe new error message tells the user to update sacrebleu.\r\ncc @LysandreJik ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2033\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2033\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2032","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2032\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2032\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2032\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2032","id":829250912,"node_id":"MDU6SXNzdWU4MjkyNTA5MTI=","number":2032,"title":"Use Arrow filtering instead of writing a new arrow file for Dataset.filter","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"assignees":[{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-03-11T15:18:50Z","updated_at":"2021-03-11T17:20:57Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently the filter method reads the dataset batch by batch to write a new, filtered, arrow file on disk. 
Therefore all the reading + writing can take some time.\r\n\r\nUsing a mask directly on the arrow table doesn't do any read or write operation therefore it's significantly quicker.\r\n\r\nI think there are two cases:\r\n- if the dataset doesn't have an indices mapping, then one can simply use the arrow filtering on the main arrow table `dataset._data.filter(...)`\r\n- if the dataset an indices mapping, then the mask should be applied on the indices mapping table `dataset._indices.filter(...)`\r\n\r\nThe indices mapping is used to map between the idx at `dataset[idx]` in `__getitem__` and the idx in the actual arrow table.\r\n\r\nThe new filter method should therefore be faster, and allow users to pass either a filtering function (that returns a boolean given an example), or directly a mask.\r\n\r\nFeel free to discuss this idea in this thread :)\r\n\r\nOne additional note: the refactor at #2025 would make all the pickle-related stuff work directly with the arrow filtering, so that we only need to change the Dataset.filter method without having to deal with pickle.\r\n\r\ncc @theo-m @gchhablani \r\n\r\nrelated issues: #1796 #1949 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2032\/reactions","total_count":4,"+1":4,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2032\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2031","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2031\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2031\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2031\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2031","id":829122778,"node_id":"MDU6SXNzdWU4MjkxMjI3Nzg=","number":2031,"title":"wikipedia.py generator that extracts XML doesn't release memory","user":{"login":"miyamonz","id":6331508,"node_id":"MDQ6VXNlcjYzMzE1MDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6331508?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/miyamonz","html_url":"https:\/\/github.com\/miyamonz","followers_url":"https:\/\/api.github.com\/users\/miyamonz\/followers","following_url":"https:\/\/api.github.com\/users\/miyamonz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/miyamonz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/miyamonz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/miyamonz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/miyamonz\/orgs","repos_url":"https:\/\/api.github.com\/users\/miyamonz\/repos","events_url":"https:\/\/api.github.com\/users\/miyamonz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/miyamonz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-11T12:51:24Z","updated_at":"2021-03-22T08:33:52Z","closed_at":"2021-03-22T08:33:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I tried downloading Japanese wikipedia, but it always failed because of out of memory maybe.\r\n\r\nI found that the generator function that 
extracts XML data in wikipedia.py doesn't release memory in the loop.\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/13a5b7db992ad5cf77895e4c0f76595314390418\/datasets\/wikipedia\/wikipedia.py#L464-L502\r\n\r\n`root.clear()` intend to clear memory, but it doesn't.\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/13a5b7db992ad5cf77895e4c0f76595314390418\/datasets\/wikipedia\/wikipedia.py#L490\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/13a5b7db992ad5cf77895e4c0f76595314390418\/datasets\/wikipedia\/wikipedia.py#L494\r\nI replaced them with `elem.clear()`, then it seems to work correctly.\r\n\r\nhere is the notebook to reproduce it.\r\nhttps:\/\/gist.github.com\/miyamonz\/dc06117302b6e85fa51cbf46dde6bb51#file-xtract_content-ipynb","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2031\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2031\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2030","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2030\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2030\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2030\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2030","id":829110803,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkwODI4NzQ4","number":2030,"title":"Implement Dataset from text","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-11T12:34:50Z","updated_at":"2021-03-18T13:29:29Z","closed_at":"2021-03-18T13:29:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2030","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2030","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2030.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2030.patch","merged_at":"2021-03-18T13:29:29Z"},"body":"Implement `Dataset.from_text`.\r\n\r\nAnalogue to #1943, 
#1946.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2030\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2030\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2029","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2029\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2029\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2029\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2029","id":829097290,"node_id":"MDU6SXNzdWU4MjkwOTcyOTA=","number":2029,"title":"Loading a faiss index KeyError","user":{"login":"nbroad1881","id":24982805,"node_id":"MDQ6VXNlcjI0OTgyODA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24982805?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nbroad1881","html_url":"https:\/\/github.com\/nbroad1881","followers_url":"https:\/\/api.github.com\/users\/nbroad1881\/followers","following_url":"https:\/\/api.github.com\/users\/nbroad1881\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nbroad1881\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nbroad1881\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nbroad1881\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nbroad1881\/orgs","repos_url":"https:\/\/api.github.com\/users\/nbroad1881\/repos","events_url":"https:\/\/api.github.com\/users\/nbroad1881\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nbroad1881\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-11T12:16:13Z","updated_at":"2021-03-12T00:21:09Z","closed_at":"2021-03-12T00:21:09Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I've recently been testing out RAG and DPR embeddings, and I've run into an issue that is not apparent in the documentation.\r\n\r\nThe basic steps are:\r\n\r\n1. Create a dataset (dataset1)\r\n2. Create an embeddings column using DPR\r\n3. Add a faiss index to the dataset\r\n4. Save faiss index to a file\r\n5. Create a new dataset (dataset2) with the same text and label information as dataset1\r\n6. Try to load the faiss index from file to dataset2\r\n7. Get `KeyError: \"Column embeddings not in the dataset\"`\r\n\r\nI've made a colab notebook that should show exactly what I did. 
Please switch to GPU runtime; I didn't check on CPU.\r\n\r\nhttps:\/\/colab.research.google.com\/drive\/1X0S9ZuZ8k0ybcoei4w7so6dS_WrABmIx?usp=sharing\r\n\r\nUbuntu Version\r\nVERSION=\"18.04.5 LTS (Bionic Beaver)\"\r\n\r\ndatasets==1.4.1\r\nfaiss==1.5.3\r\nfaiss-gpu==1.7.0\r\ntorch==1.8.0+cu101\r\ntransformers==4.3.3\r\n\r\nNVIDIA-SMI 460.56\r\nDriver Version: 460.32.03\r\nCUDA Version: 11.2 \r\nTesla K80 \r\n\r\nI was basically following the steps here: https:\/\/huggingface.co\/docs\/datasets\/faiss_and_ea.html#adding-a-faiss-index\r\n\r\nI included the exact code from the documentation at the end of the notebook to show that they don't work either.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2029\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2029\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2028","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2028\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2028\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2028\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2028","id":828721393,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkwNDk1NzEx","number":2028,"title":"Adding PersiNLU reading-comprehension","user":{"login":"danyaljj","id":2441454,"node_id":"MDQ6VXNlcjI0NDE0NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2441454?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danyaljj","html_url":"https:\/\/github.com\/danyaljj","followers_url":"https:\/\/api.github.com\/users\/danyaljj\/followers","following_url":"https:\/\/api.github.com\/users\/danyaljj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danyaljj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danyaljj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danyaljj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danyaljj\/orgs","repos_url":"https:\/\/api.github.com\/users\/danyaljj\/repos","events_url":"https:\/\/api.github.com\/users\/danyaljj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danyaljj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-11T04:41:13Z","updated_at":"2021-03-15T09:39:57Z","closed_at":"2021-03-15T09:39:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2028","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2028","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2028.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2028.patch","merged_at":"2021-03-15T09:39:57Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2028\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2028\/timeline","performed_via_github_ap
p":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2027","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2027\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2027\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2027\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2027","id":828490444,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkwMjkzNDA1","number":2027,"title":"Update format columns in Dataset.rename_columns","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-10T23:50:59Z","updated_at":"2021-03-11T14:38:40Z","closed_at":"2021-03-11T14:38:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2027","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2027","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2027.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2027.patch","merged_at":"2021-03-11T14:38:40Z"},"body":"Fixes #2026 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2027\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2027\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2026","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2026\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2026\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2026\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2026","id":828194467,"node_id":"MDU6SXNzdWU4MjgxOTQ0Njc=","number":2026,"title":"KeyError on using map after renaming a 
column","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-10T18:54:17Z","updated_at":"2021-03-11T14:39:34Z","closed_at":"2021-03-11T14:38:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI'm trying to use `cifar10` dataset. I want to rename the `img` feature to `image` in order to make it consistent with `mnist`, which I'm also planning to use. By doing this, I was trying to avoid modifying `prepare_train_features` function.\r\n\r\nHere is what I try:\r\n\r\n```python\r\ntransform = Compose([ToPILImage(),ToTensor(),Normalize([0.0,0.0,0.0],[1.0,1.0,1.0])])\r\ndef prepare_features(examples):\r\n images = []\r\n labels = []\r\n print(examples)\r\n for example_idx, example in enumerate(examples[\"image\"]):\r\n if transform is not None:\r\n images.append(transform(examples[\"image\"][example_idx].permute(2,0,1)))\r\n else:\r\n images.append(examples[\"image\"][example_idx].permute(2,0,1))\r\n labels.append(examples[\"label\"][example_idx])\r\n output = {\"label\":labels, \"image\":images}\r\n return output\r\n\r\nraw_dataset = load_dataset('cifar10')\r\nraw_dataset.set_format('torch',columns=['img','label'])\r\nraw_dataset = raw_dataset.rename_column('img','image')\r\n\r\nfeatures = datasets.Features({\r\n \"image\": datasets.Array3D(shape=(3,32,32),dtype=\"float32\"),\r\n \"label\": datasets.features.ClassLabel(names=[\r\n \"airplane\",\r\n \"automobile\",\r\n \"bird\",\r\n \"cat\",\r\n \"deer\",\r\n \"dog\",\r\n \"frog\",\r\n \"horse\",\r\n \"ship\",\r\n \"truck\",\r\n ]),\r\n })\r\ntrain_dataset = raw_dataset.map(prepare_features, features = features,batched=True, batch_size=10000)\r\n```\r\nThe error:\r\n```python\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n in ()\r\n 14 ]),\r\n 15 })\r\n---> 16 train_dataset = raw_dataset.map(prepare_features, features = features,batched=True, batch_size=10000)\r\n\r\n2 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint)\r\n 1287 test_inputs = self[:2] if batched else self[0]\r\n 1288 test_indices = [0, 1] if batched else 0\r\n-> 1289 update_data = 
does_function_return_dict(test_inputs, test_indices)\r\n 1290 logger.info(\"Testing finished, running the mapping function on the dataset\")\r\n 1291 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in does_function_return_dict(inputs, indices)\r\n 1258 fn_args = [inputs] if input_columns is None else [inputs[col] for col in input_columns]\r\n 1259 processed_inputs = (\r\n-> 1260 function(*fn_args, indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n 1261 )\r\n 1262 does_return_dict = isinstance(processed_inputs, Mapping)\r\n\r\n in prepare_features(examples)\r\n 3 labels = []\r\n 4 print(examples)\r\n----> 5 for example_idx, example in enumerate(examples[\"image\"]):\r\n 6 if transform is not None:\r\n 7 images.append(transform(examples[\"image\"][example_idx].permute(2,0,1)))\r\n\r\nKeyError: 'image'\r\n```\r\n\r\nThe print statement inside returns this:\r\n```python\r\n{'label': tensor([6, 9])}\r\n```\r\nApparently, both `img` and `image` do not exist after renaming. \r\n\r\nNote that this code works fine with `img` everywhere.\r\n\r\nNotebook: https:\/\/colab.research.google.com\/drive\/1SzESAlz3BnVYrgQeJ838vbMp1OsukiA2?usp=sharing\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2026\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2026\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2025","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2025\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2025\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2025\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2025","id":828047476,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg5ODk2NjMz","number":2025,"title":"[Refactor] Use in-memory\/memory-mapped\/concatenation tables in 
Dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":16,"created_at":"2021-03-10T17:00:47Z","updated_at":"2021-03-30T14:46:53Z","closed_at":"2021-03-26T16:51:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2025","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2025","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2025.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2025.patch","merged_at":"2021-03-26T16:51:58Z"},"body":"## Intro\r\n\r\nCurrently there is one assumption that we need to change: a dataset is either fully in memory (dataset._data_files is empty), or the dataset can be reloaded from disk with memory mapping (using the dataset._data_files).\r\nThis assumption is used for pickling for example:\r\n- in-memory dataset can just be pickled\/unpickled in-memory\r\n- on-disk dataset can be unloaded to only keep the filepaths when pickling, and then reloaded from the disk when unpickling\r\n\r\n## Issues\r\n\r\nBecause of this assumption, we can't easily implement methods like `Dataset.add_item` to append more rows to a dataset, or `dataset.add_column` to add a column, since we can't mix data from memory and data from the disk.\r\nMoreover, `concatenate_datasets` doesn't work if the datasets to concatenate are not all from memory, or all form the disk.\r\n\r\n## Solution provided in this PR\r\n\r\nI changed this by allowing several types of Table to be used in the Dataset object.\r\nMore specifically I added three pyarrow Table wrappers: InMemoryTable, MemoryMappedTable and ConcatenationTable.\r\nThe in-memory and memory-mapped tables implement the pickling behavior described above.\r\nThe ConcatenationTable can be made from several tables (either in-memory or memory mapped) called \"blocks\". Pickling a ConcatenationTable simply pickles the underlying blocks.\r\n\r\n## Implementation details\r\n\r\nThe three tables classes mentioned above all inherit from a `Table` class defined in `table.py`, which is a wrapper of a pyarrow table. The `Table` wrapper implements all the attributes and methods of the underlying pyarrow table.\r\n\r\nRegarding the MemoryMappedTable:\r\nReloading a pyarrow table from the disk makes you lose all the changes you may have applied (slice, rename_columns, drop, cast etc.). 
Therefore the MemoryMappedTable implements a \"replay\" mechanism to re-apply the changes when reloading the pyarrow table from the disk.\r\n\r\n## Checklist\r\n\r\n- [x] add InMemoryTable\r\n- [x] add MemoryMappedTable\r\n- [x] add ConcatenationTable\r\n- [x] Update the ArrowReader to use these new tables depending on the `in_memory` parameter\r\n- [x] Update Dataset.from_xxx methods\r\n- [x] Update load_from_disk and save_to_disk\r\n- [x] Backward compatibility of load_from_disk\r\n- [x] Add tests for the new tables\r\n- [x] Update current tests\r\n- [ ] Documentation\r\n\r\n----------\r\n\r\nI would be happy to discuss the design of this PR :)\r\n\r\nClose #1877 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2025\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":3,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2025\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2024","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2024\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2024\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2024\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2024","id":827842962,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg5NzEzNDAy","number":2024,"title":"Remove print statement from mnist.py","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-10T14:39:58Z","updated_at":"2021-03-11T18:03:52Z","closed_at":"2021-03-11T18:03:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2024","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2024","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2024.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2024.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2024\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2024\/timeline","perfo
rmed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2023","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2023\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2023\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2023\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2023","id":827819608,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg5NjkyNDU2","number":2023,"title":"Add Romanian to XQuAD","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-10T14:24:32Z","updated_at":"2021-03-15T10:08:17Z","closed_at":"2021-03-15T10:08:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2023","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2023","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2023.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2023.patch","merged_at":"2021-03-15T10:08:17Z"},"body":"On Jan 18, XQuAD was updated with a new Romanian validation file ([xquad commit link](https:\/\/github.com\/deepmind\/xquad\/commit\/60cac411649156efb6aab9dd4c9cde787a2c0345))\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2023\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2023\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2022","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2022\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2022\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2022\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2022","id":827435033,"node_id":"MDU6SXNzdWU4Mjc0MzUwMzM=","number":2022,"title":"ValueError when rename_column on splitted 
dataset","user":{"login":"simonschoe","id":53626067,"node_id":"MDQ6VXNlcjUzNjI2MDY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53626067?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/simonschoe","html_url":"https:\/\/github.com\/simonschoe","followers_url":"https:\/\/api.github.com\/users\/simonschoe\/followers","following_url":"https:\/\/api.github.com\/users\/simonschoe\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/simonschoe\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/simonschoe\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/simonschoe\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/simonschoe\/orgs","repos_url":"https:\/\/api.github.com\/users\/simonschoe\/repos","events_url":"https:\/\/api.github.com\/users\/simonschoe\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/simonschoe\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-10T09:40:38Z","updated_at":"2021-03-16T14:06:08Z","closed_at":"2021-03-16T14:05:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi there,\r\nI am loading `.tsv` file via `load_dataset` and subsequently split the rows into training and test set via the `ReadInstruction` API like so:\r\n\r\n```python\r\nsplit = {\r\n 'train': ReadInstruction('train', to=90, unit='%'),\r\n 'test': ReadInstruction('train', from_=-10, unit='%')\r\n}\r\n\r\ndataset = load_dataset(\r\n path='csv', # use 'text' loading script to load from local txt-files\r\n delimiter='\\t', # xxx\r\n data_files=text_files, # list of paths to local text files\r\n split=split, # xxx\r\n)\r\n\r\ndataset\r\n```\r\n\r\nPart of output:\r\n```python\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['sentence', 'sentiment'],\r\n num_rows: 900\r\n })\r\n test: Dataset({\r\n features: ['sentence', 'sentiment'],\r\n num_rows: 100\r\n })\r\n})\r\n```\r\nAfterwards I'd like to rename the 'sentence' column to 'text' in order to be compatible with my modelin pipeline. If I run the following code I experience a `ValueError` however:\r\n```python\r\ndataset['train'].rename_column('sentence', 'text')\r\n```\r\n```python\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/splits.py in __init__(self, name)\r\n 353 for split_name in split_names_from_instruction:\r\n 354 if not re.match(_split_re, split_name):\r\n--> 355 raise ValueError(f\"Split name should match '{_split_re}'' but got '{split_name}'.\")\r\n 356 \r\n 357 def __str__(self):\r\n\r\nValueError: Split name should match '^\\w+(\\.\\w+)*$'' but got 'ReadInstruction('.\r\n```\r\nIn particular, these behavior does not arise if I use the deprecated `rename_column_` method. Any idea what causes the error? Would assume something in the way I defined the split.\r\n\r\nThanks in advance! 
:)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2022\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2022\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2021","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2021\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2021\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2021\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2021","id":826988016,"node_id":"MDU6SXNzdWU4MjY5ODgwMTY=","number":2021,"title":"Interactively doing save_to_disk and load_from_disk corrupts the datasets object?","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-10T02:48:34Z","updated_at":"2021-03-13T10:07:41Z","closed_at":"2021-03-13T10:07:41Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":" dataset_info.json file saved after using save_to_disk gets corrupted as follows. \r\n \r\n \r\n![image](https:\/\/user-images.githubusercontent.com\/16892570\/110568474-ed969880-81b7-11eb-832f-2e5129656016.png)\r\n\r\nIs there a way to disable the cache that will save to \/tmp\/huggiface\/datastes ? 
\r\nI have a feeling there is a serious issue with cashing.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2021\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2021\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2020","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2020\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2020\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2020\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2020","id":826961126,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg4OTE3MjYx","number":2020,"title":"Remove unnecessary docstart check in conll-like datasets","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-10T02:20:16Z","updated_at":"2021-03-11T13:33:37Z","closed_at":"2021-03-11T13:33:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2020","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2020","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2020.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2020.patch","merged_at":"2021-03-11T13:33:37Z"},"body":"Related to this PR: #1998\r\n\r\nAdditionally, this PR adds the docstart note to the conll2002 dataset card ([link](https:\/\/raw.githubusercontent.com\/teropa\/nlp\/master\/resources\/corpora\/conll2002\/ned.train) to the raw data with `DOCSTART` lines).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2020\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2020\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2019","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2019\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2019\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2019\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2019","id":826625706,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg4NjEyODgy","number":2019,"title":"Replace print with logging in dataset scripts","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-09T20:59:34Z","updated_at":"2021-03-12T10:09:01Z","closed_at":"2021-03-11T16:14:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2019","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2019","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2019.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2019.patch","merged_at":"2021-03-11T16:14:18Z"},"body":"Replaces `print(...)` in the dataset scripts with the library logger.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2019\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2019\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2018","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2018\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2018\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2018\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2018","id":826473764,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg4NDc0NTQz","number":2018,"title":"Md gender card 
update","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-09T18:57:20Z","updated_at":"2021-03-12T17:31:00Z","closed_at":"2021-03-12T17:31:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2018","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2018","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2018.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2018.patch","merged_at":"2021-03-12T17:31:00Z"},"body":"I updated the descriptions of the datasets as they appear in the HF repo and the descriptions of the source datasets according to what I could find from the paper and the references. I'm still a little unclear about some of the fields of the different configs, and there was little info on the word list and name list. 
I'll contact the authors to see if they have any additional information or suggested changes.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2018\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2018\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2017","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2017\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2017\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2017\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2017","id":826428578,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg4NDMyNDc2","number":2017,"title":"Add TF-based Features to handle different modes of data","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T18:29:52Z","updated_at":"2021-03-17T12:32:08Z","closed_at":"2021-03-17T12:32:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2017","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2017","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2017.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2017.patch","merged_at":null},"body":"Hi,\r\n\r\nI am creating this draft PR to work on add features similar to [TF datasets](https:\/\/github.com\/tensorflow\/datasets\/tree\/master\/tensorflow_datasets\/core\/features). I'll be starting with `Tensor` and `FeatureConnector` classes, and build upon them to add other features as well. 
This is a work in progress.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2017\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2017\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2016","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2016\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2016\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2016\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2016","id":825965493,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg4MDA5NjEz","number":2016,"title":"Not all languages have 2 digit codes.","user":{"login":"asiddhant","id":13891775,"node_id":"MDQ6VXNlcjEzODkxNzc1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13891775?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/asiddhant","html_url":"https:\/\/github.com\/asiddhant","followers_url":"https:\/\/api.github.com\/users\/asiddhant\/followers","following_url":"https:\/\/api.github.com\/users\/asiddhant\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/asiddhant\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/asiddhant\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/asiddhant\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/asiddhant\/orgs","repos_url":"https:\/\/api.github.com\/users\/asiddhant\/repos","events_url":"https:\/\/api.github.com\/users\/asiddhant\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/asiddhant\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T13:53:39Z","updated_at":"2021-03-11T18:01:03Z","closed_at":"2021-03-11T18:01:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2016","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2016","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2016.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2016.patch","merged_at":"2021-03-11T18:01:03Z"},"body":".","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2016\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2016\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2015","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2015\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2015\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2015\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2015","id":825942108,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg3OTg4NTQ0","number":2015,"title":"Fix ipython 
function creation in tests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T13:36:59Z","updated_at":"2021-03-09T14:06:04Z","closed_at":"2021-03-09T14:06:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2015","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2015","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2015.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2015.patch","merged_at":"2021-03-09T14:06:03Z"},"body":"The test at `tests\/test_caching.py::RecurseDumpTest::test_dump_ipython_function` was failing in python 3.8 because the ipython function was not properly created.\r\n\r\nFix #2010 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2015\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2015\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2014","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2014\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2014\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2014\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2014","id":825916531,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg3OTY1NDg3","number":2014,"title":"more explicit method 
parameters","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T13:18:29Z","updated_at":"2021-03-10T10:08:37Z","closed_at":"2021-03-10T10:08:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2014","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2014","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2014.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2014.patch","merged_at":"2021-03-10T10:08:36Z"},"body":"re: #2009\n\nnot super convinced this is better, and while I usually fight against kwargs here it seems to me that it better conveys the relationship to the `_split_generator` method.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2014\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2014\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2013","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2013\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2013\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2013\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2013","id":825694305,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg3NzYzMTgx","number":2013,"title":"Add Cryptonite 
dataset","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T10:32:11Z","updated_at":"2021-03-09T19:27:07Z","closed_at":"2021-03-09T19:27:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2013","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2013","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2013.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2013.patch","merged_at":"2021-03-09T19:27:06Z"},"body":"cc @aviaefrat who's the original author of the dataset & paper, see https:\/\/github.com\/aviaefrat\/cryptonite","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2013\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2013\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2012","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2012\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2012\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2012\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2012","id":825634064,"node_id":"MDU6SXNzdWU4MjU2MzQwNjQ=","number":2012,"title":"No upstream 
branch","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-03-09T09:48:55Z","updated_at":"2021-03-09T11:33:31Z","closed_at":"2021-03-09T11:33:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Feels like the documentation on adding a new dataset is 
outdated?\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/987df6b4e9e20fc0c92bc9df48137d170756fd7b\/ADD_NEW_DATASET.md#L49-L54\r\n\r\nThere is no upstream branch on remote. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2012\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2012\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2011","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2011\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2011\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2011\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2011","id":825621952,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg3Njk4MTAx","number":2011,"title":"Add RoSent Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T09:40:08Z","updated_at":"2021-03-11T18:00:52Z","closed_at":"2021-03-11T18:00:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2011","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2011","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2011.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2011.patch","merged_at":"2021-03-11T18:00:52Z"},"body":"This PR adds a Romanian sentiment analysis dataset. This PR also closes pending PR #1529.\r\n\r\nI had to add an `original_id` feature because the dataset files have repeated IDs. I can remove them if needed. 
I have also added `id` which is unique.\r\n\r\nLet me know in case of any issues.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2011\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2011\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2010","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2010\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2010\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2010\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2010","id":825567635,"node_id":"MDU6SXNzdWU4MjU1Njc2MzU=","number":2010,"title":"Local testing fails","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-03-09T09:01:38Z","updated_at":"2021-03-09T14:06:03Z","closed_at":"2021-03-09T14:06:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm following the CI setup as described in \r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/8eee4fa9e133fe873a7993ba746d32ca2b687551\/.circleci\/config.yml#L16-L19\r\n\r\nin a new conda environment, at commit https:\/\/github.com\/huggingface\/datasets\/commit\/4de6dbf84e93dad97e1000120d6628c88954e5d4\r\n\r\nand getting\r\n\r\n```\r\nFAILED tests\/test_caching.py::RecurseDumpTest::test_dump_ipython_function - TypeError: an integer is required (got type bytes)\r\n1 failed, 2321 passed, 5109 skipped, 10 warnings in 124.32s (0:02:04)\r\n```\r\n\r\nSeems like a discrepancy with CI, perhaps a lib version that's not controlled? 
\r\nTried with `pyarrow=={1.0.0,0.17.1,2.0.0}`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2010\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2010\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2009","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2009\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2009\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2009\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2009","id":825541366,"node_id":"MDU6SXNzdWU4MjU1NDEzNjY=","number":2009,"title":"Ambiguous documentation","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to 
documentation"}],"state":"closed","locked":false,"assignee":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"assignees":[{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-03-09T08:42:11Z","updated_at":"2021-03-12T15:01:34Z","closed_at":"2021-03-12T15:01:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"https:\/\/github.com\/huggingface\/datasets\/blob\/2ac9a0d24a091989f869af55f9f6411b37ff5188\/templates\/new_dataset_script.py#L156-L158\r\n\r\nLooking at the template, I find this documentation line to be confusing, the method parameters don't include the `gen_kwargs` so I'm unclear where they're coming from.\r\n\r\nHappy to push a PR with a clearer statement when I understand the meaning.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2009\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2009\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2008","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2008\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2008\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2008\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2008","id":825153804,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg3Mjc1Njk4","number":2008,"title":"Fix various typos\/grammer in the 
docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-09T01:39:28Z","updated_at":"2021-03-15T18:42:49Z","closed_at":"2021-03-09T10:21:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2008","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2008","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2008.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2008.patch","merged_at":"2021-03-09T10:21:32Z"},"body":"This PR:\r\n* fixes various typos\/grammer I came across while reading the docs\r\n* adds the \"Install with conda\" installation instructions\r\n\r\nCloses #1959 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2008\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2008\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2007","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2007\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2007\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2007\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2007","id":824518158,"node_id":"MDU6SXNzdWU4MjQ1MTgxNTg=","number":2007,"title":"How to not load huggingface datasets into memory 
","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-08T12:35:26Z","updated_at":"2021-08-04T18:02:25Z","closed_at":"2021-08-04T18:02:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am running this example from transformers library version 4.3.3:\r\n(Here is the full documentation https:\/\/github.com\/huggingface\/transformers\/issues\/8771 but the running command should work out of the box)\r\n\r\n USE_TF=0 deepspeed run_seq2seq.py --model_name_or_path google\/mt5-base --dataset_name wmt16 --dataset_config_name ro-en --source_prefix \"translate English to Romanian: \" --task translation_en_to_ro --output_dir \/test\/test_large --do_train --do_eval --predict_with_generate --max_train_samples 500 --max_val_samples 500 --max_source_length 128 --max_target_length 128 --sortish_sampler --per_device_train_batch_size 8 --val_max_target_length 128 --deepspeed ds_config.json --num_train_epochs 1 --eval_steps 25000 --warmup_steps 500 --overwrite_output_dir\r\n\r\n(Here please find the script: https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/seq2seq\/run_seq2seq.py)\r\n\r\nIf you do not pass max_train_samples in above command to load the full dataset, then I get memory issue on a gpu with 24 GigBytes of memory.\r\n \r\nI need to train large-scale mt5 model on large-scale datasets of wikipedia (multiple of them concatenated or other datasets in multiple languages like OPUS), could you help me how I can avoid loading the full data into memory? to make the scripts not related to data size? 
\r\n\r\nIn above example, I was hoping the script could work without relying on dataset size, so I can still train the model without subsampling training set.\r\n\r\nthank you so much @lhoestq for your great help in advance\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2007\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2007\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2006","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2006\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2006\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2006\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2006","id":824457794,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg2Njg5Nzk2","number":2006,"title":"Don't gitignore dvc.lock","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-08T11:13:08Z","updated_at":"2021-03-08T11:28:35Z","closed_at":"2021-03-08T11:28:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2006","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2006","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2006.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2006.patch","merged_at":"2021-03-08T11:28:34Z"},"body":"The benchmarks runs are [failing](https:\/\/github.com\/huggingface\/datasets\/runs\/2055534629?check_suite_focus=true) because of \r\n```\r\nERROR: 'dvc.lock' is git-ignored.\r\n```\r\n\r\nI removed the dvc.lock file from the gitignore to fix that","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2006\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2006\/timeline","performed_via_github_app":null} 
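The memory question raised in issue 2007 above (training on wmt16 ro-en without holding the whole dataset in RAM) can be illustrated with a minimal sketch. This is an editorial example rather than code from the thread; it assumes a recent `datasets` release where Arrow memory mapping is the default and `streaming=True` is available, and it reuses the `wmt16` / `ro-en` names from the command quoted in the issue.

```python
from datasets import load_dataset

# Arrow-backed loading: the table is memory-mapped from the on-disk cache,
# so it is not copied into RAM (keep_in_memory=False is the default).
wmt = load_dataset("wmt16", "ro-en", keep_in_memory=False)

# Streaming mode reads examples lazily instead of materializing the dataset
# (assumes a datasets version that supports streaming=True).
wmt_stream = load_dataset("wmt16", "ro-en", streaming=True)
first = next(iter(wmt_stream["train"]))  # one example fetched on demand
```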
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2005","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2005\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2005\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2005\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2005","id":824275035,"node_id":"MDU6SXNzdWU4MjQyNzUwMzU=","number":2005,"title":"Setting to torch format not working with torchvision and MNIST","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-03-08T07:38:11Z","updated_at":"2021-03-09T17:58:13Z","closed_at":"2021-03-09T17:58:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\n\r\nI am trying to use `torchvision.transforms` to handle the transformation of the image data in the `mnist` dataset. Assume I have a `transform` variable which contains the `torchvision.transforms` object.\r\n\r\nA snippet of what I am trying to do:\r\n```python\r\ndef prepare_features(examples):\r\n images = []\r\n labels = []\r\n for example_idx, example in enumerate(examples[\"image\"]):\r\n if transform is not None:\r\n images.append(transform(\r\n np.array(examples[\"image\"][example_idx], dtype=np.uint8)\r\n ))\r\n else:\r\n images.append(torch.tensor(np.array(examples[\"image\"][example_idx], dtype=np.uint8)))\r\n labels.append(torch.tensor(examples[\"label\"][example_idx]))\r\n output = {\"label\":labels, \"image\":images}\r\n return output\r\n\r\nraw_dataset = load_dataset('mnist')\r\ntrain_dataset = raw_dataset.map(prepare_features, batched=True, batch_size=10000)\r\ntrain_dataset.set_format(\"torch\",columns=[\"image\",\"label\"])\r\n```\r\n\r\nAfter this, I check the type of the following:\r\n```python\r\nprint(type(train_dataset[\"train\"][\"label\"]))\r\nprint(type(train_dataset[\"train\"][\"image\"][0]))\r\n```\r\nThis leads to the following output:\r\n\r\n```python\r\n\r\n\r\n```\r\nI use `torch.utils.DataLoader` for batches, the type of `batch[\"train\"][\"image\"]` is also ``.\r\n\r\nI don't understand why only the `label` is converted to a torch tensor, why does the image not get converted? 
How can I fix this issue?\r\n\r\nThanks,\r\nGunjan\r\n\r\nEDIT:\r\nI just checked the shapes, and the types, `batch[image]` is a actually a list of list of tensors. Shape is (1,28,2,28), where `batch_size` is 2. I don't understand why this is happening. Ideally it should be a tensor of shape (2,1,28,28).\r\n\r\nEDIT 2:\r\nInside `prepare_train_features`, the shape of `images[0]` is `torch.Size([1,28,28])`, the conversion is working. However, the output of the `map` is a list of list of list of list.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2005\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2005\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2004","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2004\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2004\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2004\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2004","id":824080760,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg2MzcyODY1","number":2004,"title":"LaRoSeDa","user":{"login":"MihaelaGaman","id":6823177,"node_id":"MDQ6VXNlcjY4MjMxNzc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6823177?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MihaelaGaman","html_url":"https:\/\/github.com\/MihaelaGaman","followers_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/followers","following_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/orgs","repos_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/repos","events_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-08T01:06:32Z","updated_at":"2021-03-17T10:43:20Z","closed_at":"2021-03-17T10:43:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2004","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2004","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2004.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2004.patch","merged_at":"2021-03-17T10:43:20Z"},"body":"Add LaRoSeDa to huggingface datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2004\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2004\/timeline","performed_via_github_app":null} 
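A minimal sketch of the pattern discussed in issue 2005 above, assuming a `datasets` version that provides `Dataset.set_transform` for on-the-fly formatting; the torchvision pipeline and the two-example check are illustrative assumptions, not code taken from the issue.

```python
import numpy as np
import torch
from datasets import load_dataset
from torchvision import transforms

# Illustrative pipeline; ToTensor yields float tensors of shape (1, 28, 28).
transform = transforms.Compose([transforms.ToTensor()])

raw_dataset = load_dataset("mnist")

def to_tensors(batch):
    # Runs at access time and returns torch tensors directly, so the output
    # is not re-encoded as nested Python lists by the formatter.
    images = [transform(np.array(img, dtype=np.uint8)) for img in batch["image"]]
    return {"image": torch.stack(images), "label": torch.tensor(batch["label"])}

raw_dataset["train"].set_transform(to_tensors)  # assumes set_transform is available
batch = raw_dataset["train"][:2]
print(batch["image"].shape)  # expected: torch.Size([2, 1, 28, 28])
```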
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2003","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2003\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2003\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2003\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2003","id":824034678,"node_id":"MDU6SXNzdWU4MjQwMzQ2Nzg=","number":2003,"title":"Messages are being printed to the `stdout`","user":{"login":"mahnerak","id":1367529,"node_id":"MDQ6VXNlcjEzNjc1Mjk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1367529?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mahnerak","html_url":"https:\/\/github.com\/mahnerak","followers_url":"https:\/\/api.github.com\/users\/mahnerak\/followers","following_url":"https:\/\/api.github.com\/users\/mahnerak\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mahnerak\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mahnerak\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mahnerak\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mahnerak\/orgs","repos_url":"https:\/\/api.github.com\/users\/mahnerak\/repos","events_url":"https:\/\/api.github.com\/users\/mahnerak\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mahnerak\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-07T22:09:34Z","updated_at":"2021-03-15T17:47:47Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In this code segment, we can see some messages are being printed to the `stdout`.\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/7e60bb509b595e8edc60a87f32b2bacfc065d607\/src\/datasets\/builder.py#L545-L554\r\nAccording to the comment, it is done intentionally, but I don't really understand why don't we log it with a higher level or print it directly to the `stderr`.\r\nIn my opinion, this kind of messages should never printed to the stdout. 
At least some configuration\/flag should make it possible to provide in order to explicitly prevent the package to contaminate the stdout.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2003\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2003\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2002","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2002\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2002\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2002\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2002","id":823955744,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg2MjgwNzE3","number":2002,"title":"MOROCO","user":{"login":"MihaelaGaman","id":6823177,"node_id":"MDQ6VXNlcjY4MjMxNzc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6823177?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MihaelaGaman","html_url":"https:\/\/github.com\/MihaelaGaman","followers_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/followers","following_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/orgs","repos_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/repos","events_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-07T16:22:17Z","updated_at":"2021-03-19T09:52:06Z","closed_at":"2021-03-19T09:52:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2002","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2002","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2002.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2002.patch","merged_at":"2021-03-19T09:52:06Z"},"body":"Add MOROCO to huggingface datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2002\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2002\/timeline","performed_via_github_app":null} 
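Related to issue 2003 above, a minimal sketch of quieting non-error output with the library's own logging helpers. It assumes a `datasets` release where the reported messages are routed through `datasets.logging` rather than printed directly to stdout, and where `disable_progress_bar` is exported at the top level; both are assumptions about the installed version.

```python
import datasets

# Keep only ERROR-level messages from the datasets logger,
# so informational output no longer clutters the console.
datasets.logging.set_verbosity_error()

# Progress bars can be silenced as well
# (top-level export assumed for the installed version).
datasets.disable_progress_bar()
```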
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2001","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2001\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2001\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2001\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2001","id":823946706,"node_id":"MDU6SXNzdWU4MjM5NDY3MDY=","number":2001,"title":"Empty evidence document (\"provenance\") in KILT ELI5 dataset","user":{"login":"donggyukimc","id":16605764,"node_id":"MDQ6VXNlcjE2NjA1NzY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16605764?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/donggyukimc","html_url":"https:\/\/github.com\/donggyukimc","followers_url":"https:\/\/api.github.com\/users\/donggyukimc\/followers","following_url":"https:\/\/api.github.com\/users\/donggyukimc\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/donggyukimc\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/donggyukimc\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/donggyukimc\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/donggyukimc\/orgs","repos_url":"https:\/\/api.github.com\/users\/donggyukimc\/repos","events_url":"https:\/\/api.github.com\/users\/donggyukimc\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/donggyukimc\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-07T15:41:35Z","updated_at":"2021-03-17T05:51:01Z","closed_at":"2021-03-17T05:51:01Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In the original KILT benchmark(https:\/\/github.com\/facebookresearch\/KILT), \r\n\r\nall samples has its evidence document (i.e. wikipedia page id) for prediction.\r\n\r\nFor example, a sample in ELI5 dataset has the format including provenance (=evidence document) like this\r\n\r\n`{\"id\": \"1kiwfx\", \"input\": \"In Trading Places (1983, Akroyd\/Murphy) how does the scheme at the end of the movie work? Why would buying a lot of OJ at a high price ruin the Duke Brothers?\", \"output\": [{\"answer\": \"I feel so old. People have been askinbg what happened at the end of this movie for what must be the last 15 years of my life. It never stops. Every year\/month\/fortnight, I see someone asking what happened, and someone explaining. Andf it will keep on happening, until I am 90yrs old, in a home, with nothing but the Internet and my bladder to keep me going. 
And there it will be: \\\"what happens at the end of Trading Places?\\\"\"}, {\"provenance\": [{\"wikipedia_id\": \"242855\", \"title\": \"Futures contract\", \"section\": \"Section::::Abstract.\", \"start_paragraph_id\": 1, \"start_character\": 14, \"end_paragraph_id\": 1, \"end_character\": 612, \"bleu_score\": 0.9232808519770748}]}], \"meta\": {\"partial_evidence\": [{\"wikipedia_id\": \"520990\", \"title\": \"Trading Places\", \"section\": \"Section::::Plot.\\n\", \"start_paragraph_id\": 7, \"end_paragraph_id\": 7, \"meta\": {\"evidence_span\": [\"On television, they learn that Clarence Beeks is transporting a secret USDA report on orange crop forecasts.\", \"On television, they learn that Clarence Beeks is transporting a secret USDA report on orange crop forecasts. Winthorpe and Valentine recall large payments made to Beeks by the Dukes and realize that the Dukes plan to obtain the report to corner the market on frozen orange juice.\", \"Winthorpe and Valentine recall large payments made to Beeks by the Dukes and realize that the Dukes plan to obtain the report to corner the market on frozen orange juice.\"]}}]}}`\r\n\r\nHowever, KILT ELI5 dataset from huggingface datasets library only contain empty list of provenance.\r\n\r\n`{'id': '1oy5tc', 'input': 'in football whats the point of wasting the first two plays with a rush - up the middle - not regular rush plays i get those', 'meta': {'left_context': '', 'mention': '', 'obj_surface': [], 'partial_evidence': [], 'right_context': '', 'sub_surface': [], 'subj_aliases': [], 'template_questions': []}, 'output': [{'answer': 'In most cases the O-Line is supposed to make a hole for the running back to go through. If you run too many plays to the outside\/throws the defense will catch on.\\n\\nAlso, 2 5 yard plays gets you a new set of downs.', 'meta': {'score': 2}, 'provenance': []}, {'answer': \"I you don't like those type of plays, watch CFL. We only get 3 downs so you can't afford to waste one. 
Lots more passing.\", 'meta': {'score': 2}, 'provenance': []}]}\r\n`\r\n\r\nshould i perform other procedure to obtain evidence documents?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2001\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2001\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2000","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2000\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2000\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2000\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2000","id":823899910,"node_id":"MDU6SXNzdWU4MjM4OTk5MTA=","number":2000,"title":"Windows Permission Error (most recent version of datasets)","user":{"login":"itsLuisa","id":73881148,"node_id":"MDQ6VXNlcjczODgxMTQ4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73881148?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/itsLuisa","html_url":"https:\/\/github.com\/itsLuisa","followers_url":"https:\/\/api.github.com\/users\/itsLuisa\/followers","following_url":"https:\/\/api.github.com\/users\/itsLuisa\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/itsLuisa\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/itsLuisa\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/itsLuisa\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/itsLuisa\/orgs","repos_url":"https:\/\/api.github.com\/users\/itsLuisa\/repos","events_url":"https:\/\/api.github.com\/users\/itsLuisa\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/itsLuisa\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-03-07T11:55:28Z","updated_at":"2021-03-09T12:42:57Z","closed_at":"2021-03-09T12:42:57Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi everyone,\r\nCan anyone help me with why the dataset loading script below raises a Windows Permission Error? I stuck quite closely to https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/conll2003\/conll2003.py , only I want to load the data from three local three-column tsv-files (id\\ttokens\\tpos_tags\\n). I am using the most recent version of datasets. 
Thank you in advance!\r\nLuisa\r\n\r\nMy script:\r\n```\r\nimport datasets\r\nimport csv\r\n\r\nlogger = datasets.logging.get_logger(__name__)\r\n\r\n\r\nclass SampleConfig(datasets.BuilderConfig):\r\n\r\n def __init__(self, **kwargs):\r\n super(SampleConfig, self).__init__(**kwargs)\r\n\r\n\r\nclass Sample(datasets.GeneratorBasedBuilder):\r\n BUILDER_CONFIGS = [\r\n SampleConfig(name=\"conll2003\", version=datasets.Version(\"1.0.0\"), description=\"Conll2003 dataset\"),\r\n ]\r\n\r\n def _info(self):\r\n return datasets.DatasetInfo(\r\n description=\"Dataset with words and their POS-Tags\",\r\n features=datasets.Features(\r\n {\r\n \"id\": datasets.Value(\"string\"),\r\n \"tokens\": datasets.Sequence(datasets.Value(\"string\")),\r\n \"pos_tags\": datasets.Sequence(\r\n datasets.features.ClassLabel(\r\n names=[\r\n \"''\",\r\n \",\",\r\n \"-LRB-\",\r\n \"-RRB-\",\r\n \".\",\r\n \":\",\r\n \"CC\",\r\n \"CD\",\r\n \"DT\",\r\n \"EX\",\r\n \"FW\",\r\n \"HYPH\",\r\n \"IN\",\r\n \"JJ\",\r\n \"JJR\",\r\n \"JJS\",\r\n \"MD\",\r\n \"NN\",\r\n \"NNP\",\r\n \"NNPS\",\r\n \"NNS\",\r\n \"PDT\",\r\n \"POS\",\r\n \"PRP\",\r\n \"PRP$\",\r\n \"RB\",\r\n \"RBR\",\r\n \"RBS\",\r\n \"RP\",\r\n \"TO\",\r\n \"UH\",\r\n \"VB\",\r\n \"VBD\",\r\n \"VBG\",\r\n \"VBN\",\r\n \"VBP\",\r\n \"VBZ\",\r\n \"WDT\",\r\n \"WP\",\r\n \"WRB\",\r\n \"``\"\r\n ]\r\n )\r\n ),\r\n }\r\n ),\r\n supervised_keys=None,\r\n homepage=\"https:\/\/catalog.ldc.upenn.edu\/LDC2011T03\",\r\n citation=\"Weischedel, Ralph, et al. OntoNotes Release 4.0 LDC2011T03. Web Download. Philadelphia: Linguistic Data Consortium, 2011.\",\r\n )\r\n\r\n def _split_generators(self, dl_manager):\r\n loaded_files = dl_manager.download_and_extract(self.config.data_files)\r\n return [\r\n datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={\"filepath\": loaded_files[\"train\"]}),\r\n datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={\"filepath\": loaded_files[\"test\"]}),\r\n datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={\"filepath\": loaded_files[\"val\"]})\r\n ]\r\n\r\n def _generate_examples(self, filepath):\r\n logger.info(\"generating examples from = %s\", filepath)\r\n with open(filepath, encoding=\"cp1252\") as f:\r\n data = csv.reader(f, delimiter=\"\\t\")\r\n ids = list()\r\n tokens = list()\r\n pos_tags = list()\r\n for id_, line in enumerate(data):\r\n #print(line)\r\n if len(line) == 1:\r\n if tokens:\r\n yield id_, {\"id\": ids, \"tokens\": tokens, \"pos_tags\": pos_tags}\r\n ids = list()\r\n tokens = list()\r\n pos_tags = list()\r\n else:\r\n ids.append(line[0])\r\n tokens.append(line[1])\r\n pos_tags.append(line[2])\r\n # last example\r\n yield id_, {\"id\": ids, \"tokens\": tokens, \"pos_tags\": pos_tags}\r\n\r\n\r\ndef main():\r\n dataset = datasets.load_dataset(\r\n \"data_loading.py\", data_files={\r\n \"train\": \"train.tsv\",\r\n \"test\": \"test.tsv\",\r\n \"val\": \"val.tsv\"\r\n }\r\n )\r\n\r\n #print(dataset)\r\n\r\nif __name__==\"__main__\":\r\n main()\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2000\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2000\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1999","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1999\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1999\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1999\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1999","id":823753591,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg2MTM5ODMy","number":1999,"title":"Add FashionMNIST dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-06T21:36:57Z","updated_at":"2021-03-09T09:52:11Z","closed_at":"2021-03-09T09:52:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1999","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1999","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1999.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1999.patch","merged_at":"2021-03-09T09:52:11Z"},"body":"This PR adds [FashionMNIST](https:\/\/github.com\/zalandoresearch\/fashion-mnist) dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1999\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1999\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1998","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1998\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1998\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1998\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1998","id":823723960,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg2MTE4NTQ4","number":1998,"title":"Add -DOCSTART- note to dataset card of conll-like 
datasets","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-06T19:08:29Z","updated_at":"2021-03-11T02:20:07Z","closed_at":"2021-03-11T02:20:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1998","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1998","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1998.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1998.patch","merged_at":null},"body":"Closes #1983","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1998\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1998\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1997","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1997\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1997\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1997\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1997","id":823679465,"node_id":"MDU6SXNzdWU4MjM2Nzk0NjU=","number":1997,"title":"from datasets import MoleculeDataset, 
GEOMDataset","user":{"login":"futianfan","id":5087210,"node_id":"MDQ6VXNlcjUwODcyMTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5087210?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/futianfan","html_url":"https:\/\/github.com\/futianfan","followers_url":"https:\/\/api.github.com\/users\/futianfan\/followers","following_url":"https:\/\/api.github.com\/users\/futianfan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/futianfan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/futianfan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/futianfan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/futianfan\/orgs","repos_url":"https:\/\/api.github.com\/users\/futianfan\/repos","events_url":"https:\/\/api.github.com\/users\/futianfan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/futianfan\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-06T15:50:19Z","updated_at":"2021-03-06T16:13:26Z","closed_at":"2021-03-06T16:13:26Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I met the ImportError: cannot import name 'MoleculeDataset' from 'datasets'. Have anyone met the similar issues? Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1997\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1997\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1996","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1996\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1996\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1996\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1996","id":823573410,"node_id":"MDU6SXNzdWU4MjM1NzM0MTA=","number":1996,"title":"Error when exploring 
`arabic_speech_corpus`","user":{"login":"elgeish","id":6879673,"node_id":"MDQ6VXNlcjY4Nzk2NzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6879673?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/elgeish","html_url":"https:\/\/github.com\/elgeish","followers_url":"https:\/\/api.github.com\/users\/elgeish\/followers","following_url":"https:\/\/api.github.com\/users\/elgeish\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/elgeish\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/elgeish\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/elgeish\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/elgeish\/orgs","repos_url":"https:\/\/api.github.com\/users\/elgeish\/repos","events_url":"https:\/\/api.github.com\/users\/elgeish\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/elgeish\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-06T05:55:20Z","updated_at":"2021-03-09T11:12:25Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Navigate to https:\/\/huggingface.co\/datasets\/viewer\/?dataset=arabic_speech_corpus\r\n\r\nError:\r\n```\r\nImportError: To be able to use this dataset, you need to install the following dependencies['soundfile'] using 'pip install soundfile' for instance'\r\nTraceback:\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/streamlit\/script_runner.py\", line 332, in _run_script\r\n exec(code, module.__dict__)\r\nFile \"\/home\/sasha\/nlp-viewer\/run.py\", line 233, in \r\n configs = get_confs(option)\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/streamlit\/caching.py\", line 604, in wrapped_func\r\n return get_or_create_cached_value()\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/streamlit\/caching.py\", line 588, in get_or_create_cached_value\r\n return_value = func(*args, **kwargs)\r\nFile \"\/home\/sasha\/nlp-viewer\/run.py\", line 145, in get_confs\r\n module_path = nlp.load.prepare_module(path, dataset=True\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 342, in prepare_module\r\n f\"To be able to use this {module_type}, you need to install the following dependencies\"\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1996\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1996\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1995","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1995\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1995\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1995\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1995","id":822878431,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg1NDI5NTg0","number":1995,"title":"[Timit_asr] Make sure not only the first sample is used ","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-05T08:42:51Z","updated_at":"2021-06-30T06:25:53Z","closed_at":"2021-03-05T08:58:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1995","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1995","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1995.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1995.patch","merged_at":"2021-03-05T08:58:59Z"},"body":"When playing around with timit I noticed that only the first sample is used for all indices. 
I corrected this typo so that the dataset is correctly loaded.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1995\/reactions","total_count":4,"+1":4,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1995\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1994","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1994\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1994\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1994\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1994","id":822871238,"node_id":"MDU6SXNzdWU4MjI4NzEyMzg=","number":1994,"title":"not being able to get wikipedia es language","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-03-05T08:31:48Z","updated_at":"2021-03-11T20:46:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am trying to run a code with wikipedia of config 20200501.es, getting:\r\n\r\nTraceback (most recent call last):\r\n File \"run_mlm_t5.py\", line 608, in \r\n main()\r\n File \"run_mlm_t5.py\", line 359, in main\r\n datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name)\r\n File \"\/dara\/libs\/anaconda3\/envs\/success432\/lib\/python3.7\/site-packages\/datasets-1.2.1-py3.7.egg\/datasets\/load.py\", line 612, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/dara\/libs\/anaconda3\/envs\/success432\/lib\/python3.7\/site-packages\/datasets-1.2.1-py3.7.egg\/datasets\/builder.py\", line 527, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/dara\/libs\/anaconda3\/envs\/success432\/lib\/python3.7\/site-packages\/datasets-1.2.1-py3.7.egg\/datasets\/builder.py\", line 1050, in _download_and_prepare\r\n \"\\n\\t`{}`\".format(usage_example)\r\ndatasets.builder.MissingBeamOptions: Trying to generate a dataset using Apache Beam, yet no Beam Runner or PipelineOptions() has been provided in `load_dataset` or in the builder arguments. 
For big datasets it has to run on large-scale data processing tools like Dataflow, Spark, etc. More information about Apache Beam runners at https:\/\/beam.apache.org\/documentation\/runners\/capability-matrix\/\r\nIf you really want to run it locally because you feel like the Dataset is small enough, you can use the local beam runner called `DirectRunner` (you may run out of memory). \r\nExample of usage: \r\n\t`load_dataset('wikipedia', '20200501.es', beam_runner='DirectRunner')`\r\n\r\nthanks @lhoestq for any suggestion\/help ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1994\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1994\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1993","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1993\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1993\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1993\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1993","id":822758387,"node_id":"MDU6SXNzdWU4MjI3NTgzODc=","number":1993,"title":"How to load a dataset with load_from disk and save it again after doing transformations without changing the original? ","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-05T05:25:50Z","updated_at":"2021-03-22T04:05:50Z","closed_at":"2021-03-22T04:05:50Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am using the latest datasets library. In my work, I first use **load_from_disk** to load a data set that contains 3.8Gb information. Then during my training process, I update that dataset object and add new elements and save it in a different place. \r\n\r\nWhen I save the dataset with **save_to_disk**, the original dataset which is already in the disk also gets updated. I do not want to update it. 
How to prevent from this?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1993\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1993\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1992","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1992\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1992\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1992\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1992","id":822672238,"node_id":"MDU6SXNzdWU4MjI2NzIyMzg=","number":1992,"title":"`datasets.map` multi processing much slower than single processing ","user":{"login":"hwijeen","id":29157715,"node_id":"MDQ6VXNlcjI5MTU3NzE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29157715?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hwijeen","html_url":"https:\/\/github.com\/hwijeen","followers_url":"https:\/\/api.github.com\/users\/hwijeen\/followers","following_url":"https:\/\/api.github.com\/users\/hwijeen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hwijeen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hwijeen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hwijeen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hwijeen\/orgs","repos_url":"https:\/\/api.github.com\/users\/hwijeen\/repos","events_url":"https:\/\/api.github.com\/users\/hwijeen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hwijeen\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-05T02:10:02Z","updated_at":"2021-07-19T10:05:09Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, thank you for the great library.\r\n\r\nI've been using datasets to pretrain language models, and it often involves datasets as large as ~70G.\r\nMy data preparation step is roughly two steps: `load_dataset` which splits corpora into a table of sentences, and `map` converts a sentence into a list of integers, using a tokenizer.\r\n\r\nI noticed that `map` function with `num_proc=mp.cpu_count() \/\/2` takes more than 20 hours to finish the job where as `num_proc=1` gets the job done in about 5 hours. The machine I used has 40 cores, with 126G of RAM. There were no other jobs when `map` function was running.\r\n\r\nWhat could be the reason? I would be happy to provide information necessary to spot the reason.\r\n\r\np.s. I was experiencing the imbalance issue mentioned in [here](https:\/\/github.com\/huggingface\/datasets\/issues\/610#issuecomment-705177036) when I was using multi processing.\r\np.s.2 When I run `map` with `num_proc=1`, I see one tqdm bar but all the cores are working. When `num_proc=20`, only 20 cores work. 
\r\n![Screen Shot 2021-03-05 at 11 04 59](https:\/\/user-images.githubusercontent.com\/29157715\/110056895-ef6cf000-7da2-11eb-8307-6698e9fb1ad4.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1992\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1992\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1991","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1991\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1991\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1991\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1991","id":822554473,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg1MTYwNDkx","number":1991,"title":"Adding the conllpp dataset","user":{"login":"ZihanWangKi","id":21319243,"node_id":"MDQ6VXNlcjIxMzE5MjQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21319243?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZihanWangKi","html_url":"https:\/\/github.com\/ZihanWangKi","followers_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/followers","following_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/repos","events_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-04T22:19:43Z","updated_at":"2021-03-17T10:37:39Z","closed_at":"2021-03-17T10:37:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1991","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1991","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1991.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1991.patch","merged_at":"2021-03-17T10:37:39Z"},"body":"Adding the conllpp dataset, is a revision from https:\/\/github.com\/huggingface\/datasets\/pull\/1910.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1991\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1991\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1990","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1990\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1990\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1990\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1990","id":822384502,"node_id":"MDU6SXNzdWU4MjIzODQ1MDI=","number":1990,"title":"OSError: Memory mapping file failed: Cannot allocate memory","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-04T18:21:58Z","updated_at":"2021-08-04T18:04:25Z","closed_at":"2021-08-04T18:04:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI am trying to run a code with a wikipedia dataset, here is the command to reproduce the error. You can find the codes for run_mlm.py in huggingface repo here: https:\/\/github.com\/huggingface\/transformers\/blob\/v4.3.2\/examples\/language-modeling\/run_mlm.py \r\n```\r\npython run_mlm.py --model_name_or_path bert-base-multilingual-cased --dataset_name wikipedia --dataset_config_name 20200501.en --do_train --do_eval --output_dir \/dara\/test --max_seq_length 128\r\n```\r\n\r\nI am using transformer version: 4.3.2 \r\n\r\nBut I got memory erorr using this dataset, is there a way I could save on memory with dataset library with wikipedia dataset?\r\nSpecially I need to train a model with multiple of wikipedia datasets concatenated. 
thank you very much @lhoestq for your help and suggestions:\r\n\r\n```\r\n File \"run_mlm.py\", line 441, in \r\n main()\r\n File \"run_mlm.py\", line 233, in main\r\n split=f\"train[{data_args.validation_split_percentage}%:]\",\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/load.py\", line 750, in load_dataset\r\n ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications, in_memory=keep_in_memory)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/builder.py\", line 740, in as_dataset\r\n map_tuple=True,\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n return function(data_struct)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/builder.py\", line 757, in _build_single_dataset\r\n in_memory=in_memory,\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/builder.py\", line 829, in _as_dataset\r\n in_memory=in_memory,\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/arrow_reader.py\", line 215, in read\r\n return self.read_files(files=files, original_instructions=instructions, in_memory=in_memory)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/arrow_reader.py\", line 236, in read_files\r\n pa_table = self._read_files(files, in_memory=in_memory)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/arrow_reader.py\", line 171, in _read_files\r\n pa_table: pa.Table = self._get_dataset_from_filename(f_dict, in_memory=in_memory)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/arrow_reader.py\", line 302, in _get_dataset_from_filename\r\n pa_table = ArrowReader.read_table(filename, in_memory=in_memory)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/arrow_reader.py\", line 322, in read_table\r\n stream = stream_from(filename)\r\n File \"pyarrow\/io.pxi\", line 782, in pyarrow.lib.memory_map\r\n File \"pyarrow\/io.pxi\", line 743, in pyarrow.lib.MemoryMappedFile._open\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 99, in pyarrow.lib.check_status\r\nOSError: Memory mapping file failed: Cannot allocate memory\r\n```\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1990\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1990\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1989","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1989\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1989\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1989\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1989","id":822328147,"node_id":"MDU6SXNzdWU4MjIzMjgxNDc=","number":1989,"title":"Question\/problem with dataset labels","user":{"login":"ioana-blue","id":17202292,"node_id":"MDQ6VXNlcjE3MjAyMjky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17202292?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ioana-blue","html_url":"https:\/\/github.com\/ioana-blue","followers_url":"https:\/\/api.github.com\/users\/ioana-blue\/followers","following_url":"https:\/\/api.github.com\/users\/ioana-blue\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ioana-blue\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ioana-blue\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ioana-blue\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ioana-blue\/orgs","repos_url":"https:\/\/api.github.com\/users\/ioana-blue\/repos","events_url":"https:\/\/api.github.com\/users\/ioana-blue\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ioana-blue\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-03-04T17:06:53Z","updated_at":"2021-03-11T09:44:15Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I'm using a dataset with two labels \"nurse\" and \"not nurse\". For whatever reason (that I don't understand), I get an error that I think comes from the datasets package (using csv). Everything works fine if the labels are \"nurse\" and \"surgeon\". 
\r\n\r\nThis is the trace I get:\r\n\r\n```\r\nFile \"..\/..\/..\/models\/tr-4.3.2\/run_puppets.py\", line 523, in \r\n main()\r\n File \"..\/..\/..\/models\/tr-4.3.2\/run_puppets.py\", line 249, in main\r\n datasets = load_dataset(\"csv\", data_files=data_files)\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 740, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 572, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 650, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 1028, in _prepare_split\r\n writer.write_table(table)\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py\", line 292, in write_table\r\n pa_table = pa_table.cast(self._schema)\r\n File \"pyarrow\/table.pxi\", line 1311, in pyarrow.lib.Table.cast\r\n File \"pyarrow\/table.pxi\", line 265, in pyarrow.lib.ChunkedArray.cast\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/pyarrow\/compute.py\", line 87, in cast\r\n return call_function(\"cast\", [arr], options)\r\n File \"pyarrow\/_compute.pyx\", line 298, in pyarrow._compute.call_function\r\n File \"pyarrow\/_compute.pyx\", line 192, in pyarrow._compute.Function.call\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Failed to parse string: not nurse\r\n```\r\n\r\nAny ideas how to fix this? For now, I'll probably make them numeric. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1989\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1989\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1988","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1988\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1988\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1988\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1988","id":822324605,"node_id":"MDU6SXNzdWU4MjIzMjQ2MDU=","number":1988,"title":"Readme.md is misleading about kinds of datasets?","user":{"login":"surak","id":878399,"node_id":"MDQ6VXNlcjg3ODM5OQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/878399?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/surak","html_url":"https:\/\/github.com\/surak","followers_url":"https:\/\/api.github.com\/users\/surak\/followers","following_url":"https:\/\/api.github.com\/users\/surak\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/surak\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/surak\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/surak\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/surak\/orgs","repos_url":"https:\/\/api.github.com\/users\/surak\/repos","events_url":"https:\/\/api.github.com\/users\/surak\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/surak\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-04T17:04:20Z","updated_at":"2021-08-04T18:05:23Z","closed_at":"2021-08-04T18:05:23Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi!\r\n\r\nAt the README.MD, you say: \"efficient data pre-processing: simple, fast and reproducible data pre-processing for the above public datasets as well as your own local datasets in CSV\/JSON\/text. \"\r\n\r\nBut here:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/master\/templates\/new_dataset_script.py#L82-L117\r\n\r\nYou mention other kinds of datasets, with images and so on. I'm confused. \r\n\r\nIs it possible to use it to store, say, imagenet locally? 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1988\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1988\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1987","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1987\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1987\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1987\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1987","id":822308956,"node_id":"MDU6SXNzdWU4MjIzMDg5NTY=","number":1987,"title":"wmt15 is broken","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-04T16:46:25Z","updated_at":"2021-03-04T16:46:25Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"While testing the hotfix, I tried a random other wmt release and found wmt15 to be broken:\r\n```\r\npython -c 'from datasets import load_dataset; load_dataset(\"wmt15\", \"de-en\")' \r\nDownloading: 2.91kB [00:00, 818kB\/s]\r\nDownloading: 3.02kB [00:00, 897kB\/s]\r\nDownloading: 41.1kB [00:00, 19.1MB\/s]\r\nDownloading and preparing dataset wmt15\/de-en (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/stas\/.cache\/huggingface\/datasets\/wmt15\/de-en\/1.0.0\/39ad5f9262a0910a8ad7028ad432731ad23fdf91f2cebbbf2ba4776b9859e87f...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 740, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 578, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 634, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File 
\"\/home\/stas\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wmt15\/39ad5f9262a0910a8ad7028ad432731ad23fdf91f2cebbbf2ba4776b9859e87f\/wmt_utils.py\", line 757, in _split_generators\r\n downloaded_files = dl_manager.download_and_extract(urls_to_download)\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 283, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 191, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 203, in map_nested\r\n mapped = [\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 204, in \r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 160, in _single_map_nested\r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 160, in \r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 142, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 214, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 274, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 614, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/huggingface.co\/datasets\/wmt\/wmt15\/resolve\/main\/training-parallel-nc-v10.tgz\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1987\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1987\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1986","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1986\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1986\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1986\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1986","id":822176290,"node_id":"MDU6SXNzdWU4MjIxNzYyOTA=","number":1986,"title":"wmt datasets fail to 
load","user":{"login":"sabania","id":32322564,"node_id":"MDQ6VXNlcjMyMzIyNTY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32322564?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sabania","html_url":"https:\/\/github.com\/sabania","followers_url":"https:\/\/api.github.com\/users\/sabania\/followers","following_url":"https:\/\/api.github.com\/users\/sabania\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sabania\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sabania\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sabania\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sabania\/orgs","repos_url":"https:\/\/api.github.com\/users\/sabania\/repos","events_url":"https:\/\/api.github.com\/users\/sabania\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sabania\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-04T14:18:55Z","updated_at":"2021-03-04T14:31:07Z","closed_at":"2021-03-04T14:31:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"~\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\wmt14\\43e717d978d2261502b0194999583acb874ba73b0f4aed0ada2889d1bb00f36e\\wmt_utils.py in _split_generators(self, dl_manager)\r\n 758 # Extract manually downloaded files.\r\n 759 manual_files = dl_manager.extract(manual_paths_dict)\r\n--> 760 extraction_map = dict(downloaded_files, **manual_files)\r\n 761 \r\n 762 for language in self.config.language_pair:\r\n\r\nTypeError: type object argument after ** must be a mapping, not list","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1986\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1986\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1985","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1985\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1985\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1985\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1985","id":822170651,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg0ODM4NjIw","number":1985,"title":"Optimize int 
precision","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-03-04T14:12:23Z","updated_at":"2021-03-22T12:04:40Z","closed_at":"2021-03-16T09:44:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1985","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1985","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1985.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1985.patch","merged_at":"2021-03-16T09:44:00Z"},"body":"Optimize int precision to reduce dataset file size.\r\n\r\nClose #1973, close #1825, close #861.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1985\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":3,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1985\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1984","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1984\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1984\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1984\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1984","id":821816588,"node_id":"MDU6SXNzdWU4MjE4MTY1ODg=","number":1984,"title":"Add tests for WMT 
datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-04T06:46:42Z","updated_at":"2021-03-04T06:46:42Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As requested in #1981, we need tests for WMT datasets, using dummy data.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1984\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1984\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1983","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1983\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1983\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1983\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1983","id":821746008,"node_id":"MDU6SXNzdWU4MjE3NDYwMDg=","number":1983,"title":"The size of CoNLL-2003 is not consistant with the official 
release.","user":{"login":"h-peng17","id":39556019,"node_id":"MDQ6VXNlcjM5NTU2MDE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39556019?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/h-peng17","html_url":"https:\/\/github.com\/h-peng17","followers_url":"https:\/\/api.github.com\/users\/h-peng17\/followers","following_url":"https:\/\/api.github.com\/users\/h-peng17\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/h-peng17\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/h-peng17\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/h-peng17\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/h-peng17\/orgs","repos_url":"https:\/\/api.github.com\/users\/h-peng17\/repos","events_url":"https:\/\/api.github.com\/users\/h-peng17\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/h-peng17\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-04T04:41:34Z","updated_at":"2021-03-08T16:24:25Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Thanks for the dataset sharing! But when I use conll-2003, I meet some questions.\r\nThe statistics of conll-2003 in this repo is : \r\n\\#train 14041 \\#dev 3250 \\#test 3453\r\nWhile the official statistics is:\r\n\\#train 14987 \\#dev 3466 \\#test 3684\r\nWish for your reply~","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1983\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1983\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1982","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1982\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1982\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1982\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1982","id":821448791,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg0MjM2NzQ0","number":1982,"title":"Fix NestedDataStructure.data for empty 
dict","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-03-03T20:16:51Z","updated_at":"2021-03-04T16:46:04Z","closed_at":"2021-03-03T22:48:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1982","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1982","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1982.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1982.patch","merged_at":"2021-03-03T22:48:36Z"},"body":"Fix #1981","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1982\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1982\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1981","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1981\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1981\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1981\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1981","id":821411109,"node_id":"MDU6SXNzdWU4MjE0MTExMDk=","number":1981,"title":"wmt datasets fail to 
load","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-03-03T19:21:39Z","updated_at":"2021-03-04T14:16:47Z","closed_at":"2021-03-03T22:48:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"on master:\r\n```\r\npython -c 'from datasets import load_dataset; load_dataset(\"wmt14\", \"de-en\")'\r\nDownloading and preparing dataset wmt14\/de-en (download: Unknown size, generated: 
Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/stas\/.cache\/huggingface\/datasets\/wmt14\/de-en\/1.0.0\/43e717d978d2261502b0194999583acb874ba73b0f4aed0ada2889d1bb00f36e...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 740, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/builder.py\", line 578, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/builder.py\", line 634, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/stas\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wmt14\/43e717d978d2261502b0194999583acb874ba73b0f4aed0ada2889d1bb00f36e\/wmt_utils.py\", line 760, in _split_generators\r\n extraction_map = dict(downloaded_files, **manual_files)\r\n```\r\n\r\nit worked fine recently. same problem if I try wmt16.\r\n\r\ngit bisect points to this commit from Feb 25 as the culprit https:\/\/github.com\/huggingface\/datasets\/commit\/792f1d9bb1c5361908f73e2ef7f0181b2be409fa\r\n\r\n@albertvillanova ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1981\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":1,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1981\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1980","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1980\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1980\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1980\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1980","id":821312810,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg0MTI1OTUy","number":1980,"title":"Loading all answers from 
drop","user":{"login":"KaijuML","id":25499439,"node_id":"MDQ6VXNlcjI1NDk5NDM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25499439?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KaijuML","html_url":"https:\/\/github.com\/KaijuML","followers_url":"https:\/\/api.github.com\/users\/KaijuML\/followers","following_url":"https:\/\/api.github.com\/users\/KaijuML\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KaijuML\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KaijuML\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KaijuML\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KaijuML\/orgs","repos_url":"https:\/\/api.github.com\/users\/KaijuML\/repos","events_url":"https:\/\/api.github.com\/users\/KaijuML\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KaijuML\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-03T17:13:07Z","updated_at":"2021-03-15T11:27:26Z","closed_at":"2021-03-15T11:27:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1980","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1980","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1980.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1980.patch","merged_at":"2021-03-15T11:27:26Z"},"body":"Hello all,\r\n\r\nI propose this change to the DROP loading script so that all answers are loaded no matter their type. Currently, only \"span\" answers are loaded, which excludes a significant amount of answers from drop (i.e. \"number\" and \"date\").\r\n\r\nI updated the script with the version I use for my work. However, I couldn't find a way to verify that all is working when integrated with the datasets repo, since the `load_dataset` method seems to always download the script from github and not local files.\r\n\r\nNote that 9 items from the train set have no answers, as well as 1 from the validation set. 
The script I propose simply do not load them.\r\n\r\nLet me know if there is anything else I can do,\r\nCl\u00e9ment","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1980\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1980\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1979","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1979\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1979\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1979\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1979","id":820977853,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgzODQ3MTk3","number":1979,"title":"Add article_id and process test set template for semeval 2020 task 11\u2026","user":{"login":"hemildesai","id":8195444,"node_id":"MDQ6VXNlcjgxOTU0NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8195444?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hemildesai","html_url":"https:\/\/github.com\/hemildesai","followers_url":"https:\/\/api.github.com\/users\/hemildesai\/followers","following_url":"https:\/\/api.github.com\/users\/hemildesai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hemildesai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hemildesai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hemildesai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hemildesai\/orgs","repos_url":"https:\/\/api.github.com\/users\/hemildesai\/repos","events_url":"https:\/\/api.github.com\/users\/hemildesai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hemildesai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-03T10:34:32Z","updated_at":"2021-03-13T10:59:40Z","closed_at":"2021-03-12T13:10:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1979","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1979","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1979.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1979.patch","merged_at":"2021-03-12T13:10:50Z"},"body":"\u2026 dataset\r\n\r\n- `article_id` is needed to create the submission file for the task at https:\/\/propaganda.qcri.org\/semeval2020-task11\/\r\n- The `technique classification` task provides the span indices in a template for the test set that is necessary to complete the task. 
This PR implements processing of that template for the dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1979\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1979\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1978","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1978\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1978\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1978\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1978","id":820956806,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgzODI5Njgz","number":1978,"title":"Adding ro sts dataset","user":{"login":"lorinczb","id":36982089,"node_id":"MDQ6VXNlcjM2OTgyMDg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36982089?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lorinczb","html_url":"https:\/\/github.com\/lorinczb","followers_url":"https:\/\/api.github.com\/users\/lorinczb\/followers","following_url":"https:\/\/api.github.com\/users\/lorinczb\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lorinczb\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lorinczb\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lorinczb\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lorinczb\/orgs","repos_url":"https:\/\/api.github.com\/users\/lorinczb\/repos","events_url":"https:\/\/api.github.com\/users\/lorinczb\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lorinczb\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-03T10:08:53Z","updated_at":"2021-03-05T10:00:14Z","closed_at":"2021-03-05T09:33:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1978","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1978","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1978.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1978.patch","merged_at":"2021-03-05T09:33:55Z"},"body":"Adding [RO-STS](https:\/\/github.com\/dumitrescustefan\/RO-STS) dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1978\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1978\/timeline","performed_via_github_app":null} 
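For the DROP change described in #1980 above (loading "number" and "date" answers in addition to "span" answers), a minimal illustration of the idea is sketched below. This is not the code from the pull request; the "number"/"spans"/"date" keys are an assumption about the usual raw DROP answer schema.

```python
# Sketch only: turn a raw DROP answer dict into a single answer string,
# whether it is a span, a number, or a date answer.
# The key names ("number", "spans", "date") are assumptions about the raw
# DROP JSON schema, not taken from the pull request itself.
def answer_to_text(answer: dict) -> str:
    if answer.get("number"):           # e.g. {"number": "3", "spans": [], "date": {}}
        return str(answer["number"])
    if answer.get("spans"):            # e.g. {"spans": ["the Bears"], ...}
        return ", ".join(answer["spans"])
    date = answer.get("date", {}) or {}
    # e.g. {"date": {"day": "7", "month": "May", "year": "1945"}}
    return " ".join(part for part in (date.get("day"), date.get("month"), date.get("year")) if part)


print(answer_to_text({"number": "", "spans": [], "date": {"day": "7", "month": "May", "year": "1945"}}))
# -> "7 May 1945"
```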
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1977","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1977\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1977\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1977\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1977","id":820312022,"node_id":"MDU6SXNzdWU4MjAzMTIwMjI=","number":1977,"title":"ModuleNotFoundError: No module named 'apache_beam' for wikipedia datasets ","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-02T19:21:28Z","updated_at":"2021-03-03T10:17:40Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am trying to run run_mlm.py code [1] of huggingface with following \"wikipedia\"\/ \"20200501.aa\" dataset:\r\n\r\n`python run_mlm.py --model_name_or_path bert-base-multilingual-cased --dataset_name wikipedia --dataset_config_name 20200501.aa --do_train --do_eval --output_dir \/tmp\/test-mlm --max_seq_length 256\r\n`\r\n\r\nI am getting this error, but as per documentation, huggingface dataset provide processed version of this dataset and users can load it without requiring setup extra settings for apache-beam. could you help me please to load this dataset? \r\nDo you think I can run run_ml.py with this dataset? or anyway I could subsample and train the model? I greatly appreciate providing the processed version of all languages for this dataset, which allow the user to use them without setting up apache-beam,. 
thanks \r\n\r\nI really appreciate your help.\r\n@lhoestq \r\n\r\nthanks.\r\n\r\n[1] https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/language-modeling\/run_mlm.py\r\n\r\nerror I get: \r\n\r\n```\r\n>>> import datasets \r\n>>> datasets.load_dataset(\"wikipedia\", \"20200501.aa\")\r\nDownloading and preparing dataset wikipedia\/20200501.aa (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/dara\/temp\/cache_home_2\/datasets\/wikipedia\/20200501.aa\/1.0.0\/4021357e28509391eab2f8300d9b689e7e8f3a877ebb3d354b01577d497ebc63...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/dara\/temp\/libs\/anaconda3\/envs\/codes\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/load.py\", line 746, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/dara\/temp\/libs\/anaconda3\/envs\/codes\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/builder.py\", line 573, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/dara\/temp\/libs\/anaconda3\/envs\/codes\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/builder.py\", line 1099, in _download_and_prepare\r\n import apache_beam as beam\r\nModuleNotFoundError: No module named 'apache_beam'\r\n\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1977\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1977\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1976","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1976\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1976\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1976\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1976","id":820228538,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgzMjA3NDI4","number":1976,"title":"Add datasets full offline mode with 
HF_DATASETS_OFFLINE","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-02T17:26:59Z","updated_at":"2021-03-03T15:45:31Z","closed_at":"2021-03-03T15:45:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1976","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1976","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1976.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1976.patch","merged_at":"2021-03-03T15:45:30Z"},"body":"Add the HF_DATASETS_OFFLINE environment variable for users who want to use `datasets` offline without having to wait for the network timeouts\/retries to happen. 
This was requested in https:\/\/github.com\/huggingface\/datasets\/issues\/1939\r\n\r\ncc @stas00 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1976\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1976\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1975","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1975\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1975\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1975\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1975","id":820205485,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgzMTg4NjM3","number":1975,"title":"Fix flake8","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-02T16:59:13Z","updated_at":"2021-03-04T10:43:22Z","closed_at":"2021-03-04T10:43:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1975","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1975","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1975.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1975.patch","merged_at":"2021-03-04T10:43:22Z"},"body":"Fix flake8 style.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1975\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1975\/timeline","performed_via_github_app":null} 
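The offline mode added in #1976 above is driven entirely by the HF_DATASETS_OFFLINE environment variable, so a usage sketch is short. Setting the variable before `datasets` is imported is the safe choice; the dataset name below is only an example, and the call succeeds only if that dataset is already in the local cache.

```python
# Sketch: run `datasets` in full offline mode so no network requests
# (and therefore no timeouts/retries) are attempted.
import os

os.environ["HF_DATASETS_OFFLINE"] = "1"  # set before importing `datasets`

from datasets import load_dataset

# Only works for datasets that were downloaded and cached beforehand.
squad = load_dataset("squad")
print(squad)
```

The same effect can be obtained from the shell, e.g. `HF_DATASETS_OFFLINE=1 python my_script.py`.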
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1974","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1974\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1974\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1974\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1974","id":820122223,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgzMTE5MDI0","number":1974,"title":"feat(docs): navigate with left\/right arrow keys","user":{"login":"ydcjeff","id":32727188,"node_id":"MDQ6VXNlcjMyNzI3MTg4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32727188?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ydcjeff","html_url":"https:\/\/github.com\/ydcjeff","followers_url":"https:\/\/api.github.com\/users\/ydcjeff\/followers","following_url":"https:\/\/api.github.com\/users\/ydcjeff\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ydcjeff\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ydcjeff\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ydcjeff\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ydcjeff\/orgs","repos_url":"https:\/\/api.github.com\/users\/ydcjeff\/repos","events_url":"https:\/\/api.github.com\/users\/ydcjeff\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ydcjeff\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-02T15:24:50Z","updated_at":"2021-03-04T10:44:12Z","closed_at":"2021-03-04T10:42:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1974","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1974","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1974.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1974.patch","merged_at":"2021-03-04T10:42:48Z"},"body":"Enables docs navigation with left\/right arrow keys. 
It can be useful for the ones who navigate with keyboard a lot.\r\nMore info : https:\/\/github.com\/sphinx-doc\/sphinx\/pull\/2064\r\n\r\nYou can try here : https:\/\/29353-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/index.html","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1974\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1974\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1973","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1973\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1973\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1973\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1973","id":820077312,"node_id":"MDU6SXNzdWU4MjAwNzczMTI=","number":1973,"title":"Question: what gets stored in the datasets cache and why is it so huge?","user":{"login":"ioana-blue","id":17202292,"node_id":"MDQ6VXNlcjE3MjAyMjky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17202292?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ioana-blue","html_url":"https:\/\/github.com\/ioana-blue","followers_url":"https:\/\/api.github.com\/users\/ioana-blue\/followers","following_url":"https:\/\/api.github.com\/users\/ioana-blue\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ioana-blue\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ioana-blue\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ioana-blue\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ioana-blue\/orgs","repos_url":"https:\/\/api.github.com\/users\/ioana-blue\/repos","events_url":"https:\/\/api.github.com\/users\/ioana-blue\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ioana-blue\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertv
illanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":8,"created_at":"2021-03-02T14:35:53Z","updated_at":"2021-03-30T14:03:59Z","closed_at":"2021-03-16T09:44:00Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm running several training jobs (around 10) with a relatively large dataset (3M samples). The datasets cache reached 178G and it seems really large. What is it stored in there and why is it so large? I don't think I noticed this problem before and seems to be related to the new version of the datasets library. Any insight? Thank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1973\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1973\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1972","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1972\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1972\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1972\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1972","id":819752761,"node_id":"MDU6SXNzdWU4MTk3NTI3NjE=","number":1972,"title":"'Dataset' object has no attribute 
'rename_column'","user":{"login":"farooqzaman1","id":23195502,"node_id":"MDQ6VXNlcjIzMTk1NTAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23195502?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/farooqzaman1","html_url":"https:\/\/github.com\/farooqzaman1","followers_url":"https:\/\/api.github.com\/users\/farooqzaman1\/followers","following_url":"https:\/\/api.github.com\/users\/farooqzaman1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/farooqzaman1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/farooqzaman1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/farooqzaman1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/farooqzaman1\/orgs","repos_url":"https:\/\/api.github.com\/users\/farooqzaman1\/repos","events_url":"https:\/\/api.github.com\/users\/farooqzaman1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/farooqzaman1\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-02T08:01:49Z","updated_at":"2021-03-02T13:08:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"'Dataset' object has no attribute 'rename_column'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1972\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1972\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1971","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1971\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1971\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1971\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1971","id":819714231,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgyNzgyNTU0","number":1971,"title":"Fix ArrowWriter closes stream at 
exit","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-02T07:12:34Z","updated_at":"2021-03-10T16:36:57Z","closed_at":"2021-03-10T16:36:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1971","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1971","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1971.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1971.patch","merged_at":"2021-03-10T16:36:56Z"},"body":"Current implementation of ArrowWriter does not properly release the `stream` resource (by closing it) if its `finalize()` method is not called and\/or an Exception is raised before\/during the call to its `finalize()` method.\r\n\r\nTherefore, ArrowWriter should be used as a context manager that properly closes its `stream` resource at exit.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1971\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1971\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1970","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1970\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1970\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1970\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1970","id":819500620,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgyNjAzMzEw","number":1970,"title":"Fixing the URL filtering for bad MLSUM examples in 
GEM","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-02T01:22:58Z","updated_at":"2021-03-02T03:19:06Z","closed_at":"2021-03-02T02:01:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1970","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1970","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1970.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1970.patch","merged_at":"2021-03-02T02:01:33Z"},"body":"This updates the code and metadata to use the updated `gem_mlsum_bad_ids_fixed.json` file provided by @juand-r\r\n\r\ncc @sebastianGehrmann ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1970\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1970\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1967","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1967\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1967\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1967\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1967","id":819129568,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgyMjc5OTEx","number":1967,"title":"Add Turkish News Category Dataset - 270K - Lite 
Version","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-01T18:21:59Z","updated_at":"2021-03-02T17:25:00Z","closed_at":"2021-03-02T17:25:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1967","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1967","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1967.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1967.patch","merged_at":"2021-03-02T17:25:00Z"},"body":"This PR adds the Turkish News Categories Dataset (270K - Lite Version) dataset which is a text classification dataset by me, @basakbuluz and @serdarakyol.\r\nThis dataset contains the same news from the current [interpress_news_category_tr dataset](https:\/\/huggingface.co\/datasets\/interpress_news_category_tr) but contains less information, OCR errors are reduced, can be easily separated, and can be divided into 10 classes (\"k\u00fclt\u00fcrsanat\", \"ekonomi\", \"siyaset\", \"e\u011fitim\", \"d\u00fcnya\", \"spor\", \"teknoloji\", \"magazin\", \"sa\u011fl\u0131k\", \"g\u00fcndem\") were rearranged.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1967\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1967\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1966","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1966\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1966\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1966\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1966","id":819101253,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgyMjU2MzE0","number":1966,"title":"Fix metrics collision in separate multiprocessed 
experiments","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-01T17:45:18Z","updated_at":"2021-03-02T13:05:45Z","closed_at":"2021-03-02T13:05:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1966","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1966","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1966.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1966.patch","merged_at":"2021-03-02T13:05:44Z"},"body":"As noticed in #1942 , there's a issue with locks if you run multiple separate evaluation experiments in a multiprocessed setup.\r\n\r\nIndeed there is a time span in Metric._finalize() where the process 0 loses its lock before re-acquiring it. This is bad since the lock of the process 0 tells the other process that the corresponding cache file is available for writing\/reading\/deleting: we end up having one metric cache that collides with another one. This can raise FileNotFound errors when a metric tries to read the cache file and if the second conflicting metric deleted it.\r\n\r\nTo fix that I made sure that the lock file of the process 0 stays acquired from the cache file creation to the end of the metric computation. 
This way the other metrics can simply sample a new hashing name in order to avoid the collision.\r\n\r\nFinally I added missing tests for separate experiments in distributed setup.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1966\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1966\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1965","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1965\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1965\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1965\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1965","id":818833460,"node_id":"MDU6SXNzdWU4MTg4MzM0NjA=","number":1965,"title":"Can we parallelized the add_faiss_index process over dataset shards ?","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-01T12:47:34Z","updated_at":"2021-03-04T19:40:56Z","closed_at":"2021-03-04T19:40:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am thinking of making the **add_faiss_index** process faster. 
What if we run the add_faiss_index process on separate dataset shards and then combine them before (dataset.concatenate) saving the faiss.index file ?\r\n\r\nI feel theoretically this will reduce the accuracy of retrieval since it affects the indexing process.\r\n\r\n@lhoestq\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1965\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1965\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1964","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1964\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1964\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1964\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1964","id":818624864,"node_id":"MDU6SXNzdWU4MTg2MjQ4NjQ=","number":1964,"title":"Datasets.py function load_dataset does not match squad dataset","user":{"login":"LeopoldACC","id":44536699,"node_id":"MDQ6VXNlcjQ0NTM2Njk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44536699?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LeopoldACC","html_url":"https:\/\/github.com\/LeopoldACC","followers_url":"https:\/\/api.github.com\/users\/LeopoldACC\/followers","following_url":"https:\/\/api.github.com\/users\/LeopoldACC\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LeopoldACC\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LeopoldACC\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LeopoldACC\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LeopoldACC\/orgs","repos_url":"https:\/\/api.github.com\/users\/LeopoldACC\/repos","events_url":"https:\/\/api.github.com\/users\/LeopoldACC\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LeopoldACC\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-01T08:41:31Z","updated_at":"2021-03-04T15:09:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"### 1 When I try to train lxmert,and follow the code in README that --dataset name:\r\n```shell \r\npython examples\/question-answering\/run_qa.py --model_name_or_path unc-nlp\/lxmert-base-uncased --dataset_name squad --do_train --do_eval --per_device_train_batch_size 12 --learning_rate 3e-5 --num_train_epochs 2 --max_seq_length 384 --doc_stride 128 --output_dir \/home2\/zhenggo1\/checkpoint\/lxmert_squad\r\n```\r\nthe bug is that:\r\n```\r\nDownloading and preparing dataset squad\/plain_text (download: 33.51 MiB, generated: 85.75 MiB, post-processed: Unknown size, total: 119.27 MiB) to \/home2\/zhenggo1\/.cache\/huggingface\/datasets\/squad\/plain_text\/1.0.0\/4c81550d83a2ac7c7ce23783bd8ff36642800e6633c1f18417fb58c3ff50cdd7...\r\nTraceback (most recent call last):\r\n File \"examples\/question-answering\/run_qa.py\", line 501, in \r\n main()\r\n File \"examples\/question-answering\/run_qa.py\", line 217, in main\r\n datasets = load_dataset(data_args.dataset_name, 
data_args.dataset_config_name)\r\n File \"\/home2\/zhenggo1\/anaconda3\/envs\/lpot\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 746, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home2\/zhenggo1\/anaconda3\/envs\/lpot\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 573, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home2\/zhenggo1\/anaconda3\/envs\/lpot\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 633, in _download_and_prepare\r\n self.info.download_checksums, dl_manager.get_recorded_sizes_checksums(), \"dataset source files\"\r\n File \"\/home2\/zhenggo1\/anaconda3\/envs\/lpot\/lib\/python3.7\/site-packages\/datasets\/utils\/info_utils.py\", line 39, in verify_checksums\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/rajpurkar.github.io\/SQuAD-explorer\/dataset\/train-v1.1.json']\r\n```\r\nAnd I try to find the [checksum link](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/squad\/dataset_infos.json)\r\n,is the problem plain_text do not have a checksum?\r\n\r\n### 2 When I try to train lxmert,and use local dataset:\r\n```\r\npython examples\/question-answering\/run_qa.py --model_name_or_path unc-nlp\/lxmert-base-uncased --train_file $SQUAD_DIR\/train-v1.1.json --validation_file $SQUAD_DIR\/dev-v1.1.json --do_train --do_eval --per_device_train_batch_size 12 --learning_rate 3e-5 --num_train_epochs 2 --max_seq_length 384 --doc_stride 128 --output_dir \/home2\/zhenggo1\/checkpoint\/lxmert_squad\r\n```\r\nThe bug is that \r\n```\r\n['title', 'paragraphs']\r\nTraceback (most recent call last):\r\n File \"examples\/question-answering\/run_qa.py\", line 501, in \r\n main()\r\n File \"examples\/question-answering\/run_qa.py\", line 273, in main\r\n answer_column_name = \"answers\" if \"answers\" in column_names else column_names[2]\r\nIndexError: list index out of range\r\n```\r\nI print the answer_column_name and find that local squad dataset need the package datasets to preprocessing so that the code below can work:\r\n```\r\nif training_args.do_train:\r\n column_names = datasets[\"train\"].column_names\r\n else:\r\n column_names = datasets[\"validation\"].column_names\r\n print(datasets[\"train\"].column_names)\r\n question_column_name = \"question\" if \"question\" in column_names else column_names[0]\r\n context_column_name = \"context\" if \"context\" in column_names else column_names[1]\r\n answer_column_name = \"answers\" if \"answers\" in column_names else column_names[2]\r\n``` \r\n## Please tell me how to fix the bug,thks a lot!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1964\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1964\/timeline","performed_via_github_app":null} 
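The second traceback in issue #1964 above follows directly from the printed column names: the raw SQuAD v1.1 JSON only exposes the nested `title` / `paragraphs` columns, so `column_names[2]` is out of range. Below is a small self-contained sketch of that failure mode with a more explicit message; it is a diagnostic illustration, not a patch to `run_qa.py`.

```python
# Sketch: why run_qa.py fails with IndexError when a raw (nested) SQuAD JSON
# file is passed via --train_file instead of flat question/context/answers records.
column_names = ["title", "paragraphs"]  # what the issue reports for the local file

if "answers" in column_names:
    answer_column_name = "answers"
elif len(column_names) > 2:
    answer_column_name = column_names[2]  # the line that raises IndexError upstream
else:
    print(
        f"Expected flat 'question'/'context'/'answers' columns but got {column_names}; "
        "the nested SQuAD JSON has to be flattened before run_qa.py can consume it."
    )
```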
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1963","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1963\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1963\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1963\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1963","id":818289967,"node_id":"MDU6SXNzdWU4MTgyODk5Njc=","number":1963,"title":"bug in SNLI dataset ","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-28T19:36:20Z","updated_at":"2021-03-01T12:01:29Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nThere is label of -1 in train set of SNLI dataset, please find the code below:\r\n\r\n```\r\nimport numpy as np \r\nimport datasets \r\ndata = datasets.load_dataset(\"snli\")[\"train\"]\r\nlabels = []\r\nfor d in data:\r\n labels.append(d[\"label\"])\r\nprint(np.unique(labels))\r\n```\r\n\r\nand results:\r\n\r\n`[-1 0 1 2]`\r\n\r\nversion of datasets used:\r\n`datasets 1.2.1 \r\n`\r\n\r\nthanks for your help. 
@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1963\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1963\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1962","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1962\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1962\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1962\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1962","id":818089156,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgxNDQwNzM4","number":1962,"title":"Fix unused arguments","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-28T02:47:07Z","updated_at":"2021-03-11T02:18:17Z","closed_at":"2021-03-03T16:37:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1962","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1962","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1962.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1962.patch","merged_at":"2021-03-03T16:37:50Z"},"body":"Noticed some args in the codebase are not used, so managed to find all such occurrences with Pylance and fix them.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1962\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1962\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1961","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1961\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1961\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1961\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1961","id":818077947,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgxNDM3NDI0","number":1961,"title":"Add sst dataset","user":{"login":"patpizio","id":15801338,"node_id":"MDQ6VXNlcjE1ODAxMzM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15801338?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patpizio","html_url":"https:\/\/github.com\/patpizio","followers_url":"https:\/\/api.github.com\/users\/patpizio\/followers","following_url":"https:\/\/api.github.com\/users\/patpizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patpizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patpizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patpizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patpizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/patpizio\/repos","events_url":"https:\/\/api.github.com\/users\/patpizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patpizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-28T02:08:29Z","updated_at":"2021-03-04T10:38:53Z","closed_at":"2021-03-04T10:38:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1961","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1961","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1961.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1961.patch","merged_at":"2021-03-04T10:38:53Z"},"body":"Related to #1934—Add the Stanford Sentiment Treebank dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1961\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1961\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1960","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1960\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1960\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1960\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1960","id":818073154,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgxNDMzOTY4","number":1960,"title":"Allow stateful function in 
dataset.map","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-28T01:29:05Z","updated_at":"2021-03-23T15:26:49Z","closed_at":"2021-03-23T15:26:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1960","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1960","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1960.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1960.patch","merged_at":"2021-03-23T15:26:49Z"},"body":"Removes the \"test type\" section in Dataset.map which would modify the state of the stateful function. Now, the return type of the map function is inferred after processing the first example.\r\n\r\nFixes #1940 \r\n\r\n@lhoestq Not very happy with the usage of `nonlocal`. 
Would like to hear your opinion on this.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1960\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1960\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1959","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1959\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1959\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1959\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1959","id":818055644,"node_id":"MDU6SXNzdWU4MTgwNTU2NDQ=","number":1959,"title":"Bug in skip_rows argument of load_dataset function ?","user":{"login":"LedaguenelArthur","id":73159756,"node_id":"MDQ6VXNlcjczMTU5NzU2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73159756?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LedaguenelArthur","html_url":"https:\/\/github.com\/LedaguenelArthur","followers_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/followers","following_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/orgs","repos_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/repos","events_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-27T23:32:54Z","updated_at":"2021-03-09T10:21:32Z","closed_at":"2021-03-09T10:21:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello everyone,\r\n\r\nI'm quite new to Git so sorry in advance if I'm breaking some ground rules of issues posting... :\/\r\nI tried to use the load_dataset function, from Huggingface datasets library, on a csv file using the skip_rows argument described on Huggingface page to skip the first row containing column names\r\n\r\n`test_dataset = load_dataset('csv', data_files=['test_wLabel.tsv'], delimiter='\\t', column_names=[\"id\", \"sentence\", \"label\"], skip_rows=1)`\r\n\r\nBut I got the following error message\r\n\r\n`__init__() got an unexpected keyword argument 'skip_rows'`\r\n\r\nHave I used the wrong argument ? 
Am I missing something or is this a bug ?\r\n\r\nThank you very much for your time,\r\nBest regards,\r\nArthur","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1959\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1959\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1958","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1958\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1958\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1958\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1958","id":818037548,"node_id":"MDU6SXNzdWU4MTgwMzc1NDg=","number":1958,"title":"XSum dataset download link broken","user":{"login":"himat","id":1156974,"node_id":"MDQ6VXNlcjExNTY5NzQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1156974?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/himat","html_url":"https:\/\/github.com\/himat","followers_url":"https:\/\/api.github.com\/users\/himat\/followers","following_url":"https:\/\/api.github.com\/users\/himat\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/himat\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/himat\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/himat\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/himat\/orgs","repos_url":"https:\/\/api.github.com\/users\/himat\/repos","events_url":"https:\/\/api.github.com\/users\/himat\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/himat\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-27T21:47:56Z","updated_at":"2021-02-27T21:50:16Z","closed_at":"2021-02-27T21:50:16Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I did \r\n```\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"xsum\")\r\n```\r\n\r\nThis returns\r\n`ConnectionError: Couldn't reach http:\/\/bollin.inf.ed.ac.uk\/public\/direct\/XSUM-EMNLP18-Summary-Data-Original.tar.gz`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1958\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1958\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1957","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1957\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1957\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1957\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1957","id":818014624,"node_id":"MDU6SXNzdWU4MTgwMTQ2MjQ=","number":1957,"title":"[request] make load_metric api 
intutive","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-27T20:43:54Z","updated_at":"2021-02-27T22:21:10Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\nmetric = load_metric('glue', 'mrpc', num_process=num_process, process_id=rank)\r\n```\r\n\r\nMay I suggest that `num_process` is confusing as it's singular yet expects a plural value and either \r\n* be deprecated in favor of `num_processes` which is more intuitive since it's plural as its expected value\r\n* or even better why not mimic the established dist environment convention for that purpose, which uses `world_size`. \r\n\r\nSame for `process_id` - why reinvent the naming and needing to explain that this is **NOT** `PID`, when we have `rank` already. That is:\r\n\r\n```\r\nmetric = load_metric('glue', 'mrpc', world_size=world_size, rank=rank)\r\n```\r\n\r\nThis then fits like a glove into the pytorch DDP and alike envs. and we just need to call:\r\n\r\n* `dist.get_world_size()`\r\n* `dist.get_rank()`\r\n\r\nSo it'd be as simple as:\r\n\r\n```\r\nmetric = load_metric('glue', 'mrpc', world_size=dist.get_world_size(), rank=dist.get_rank())\r\n```\r\n\r\nFrom: https:\/\/pytorch.org\/docs\/stable\/distributed.html#torch.distributed.init_process_group\r\n\r\n* `world_size (int, optional)` \u2013 Number of processes participating in the job. Required if store is specified.\r\n* `rank (int, optional)` \u2013 Rank of the current process. 
Required if store is specified.\r\n\r\nAnd may be an example would be useful, so that the user doesn't even need to think about where to get `dist`:\r\n```\r\nimport torch.distributed as dist\r\nif dist.is_initialized():\r\n metric = load_metric(metric_name, world_size=dist.get_world_size(), rank=dist.get_rank())\r\nelse:\r\n metric = load_metric(metric_name)\r\n```\r\n\r\nI'm aware this is pytorch-centric, but it's better than no examples, IMHO.\r\n\r\nThank you.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1957\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1957\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1956","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1956\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1956\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1956\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1956","id":818013741,"node_id":"MDU6SXNzdWU4MTgwMTM3NDE=","number":1956,"title":"[distributed env] potentially unsafe parallel execution","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-27T20:38:45Z","updated_at":"2021-03-01T17:24:42Z","closed_at":"2021-03-01T17:24:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\nmetric = load_metric('glue', 'mrpc', num_process=num_process, process_id=rank)\r\n```\r\n\r\npresumes that there is only one set of parallel processes running - and will intermittently fail if you have multiple sets running as they will surely overwrite each other. Similar to https:\/\/github.com\/huggingface\/datasets\/issues\/1942 (but for a different reason).\r\nThat's why dist environments use some unique to a group identifier so that each group is dealt with separately. \r\n\r\ne.g. 
the env-way of pytorch dist syncing is done with a unique per set `MASTER_ADDRESS+MASTER_PORT`\r\n\r\nSo ideally this interface should ask for a shared secret to do the right thing.\r\n\r\nI'm not reporting an immediate need, but am only flagging that this will hit someone down the road.\r\n\r\nThis problem can be remedied by adding a new optional `shared_secret` option, which can then be used to differentiate different groups of processes. and this secret should be part of the file lock name and the experiment.\r\n\r\nThank you","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1956\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1956\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1955","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1955\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1955\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1955\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1955","id":818010664,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgxMzk2OTA5","number":1955,"title":"typos + grammar","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-27T20:21:43Z","updated_at":"2021-03-01T17:20:38Z","closed_at":"2021-03-01T14:43:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1955","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1955","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1955.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1955.patch","merged_at":"2021-03-01T14:43:19Z"},"body":"This PR proposes a few typo + grammar fixes, and rewrites some sentences in an attempt to improve readability.\r\n\r\nN.B. 
When referring to the library `datasets` in the docs it is typically used as a singular, and it definitely is a singular when written as \"`datasets` library\", that is \"`datasets` library is ...\" and not \"are ...\".","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1955\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1955\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1954","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1954\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1954\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1954\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1954","id":817565563,"node_id":"MDU6SXNzdWU4MTc1NjU1NjM=","number":1954,"title":"add a new column ","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":
"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-02-26T18:17:27Z","updated_at":"2021-04-29T14:50:43Z","closed_at":"2021-04-29T14:50:43Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI'd need to add a new column to the dataset, I was wondering how this can be done? thanks \r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1954\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1954\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1953","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1953\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1953\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1953\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1953","id":817498869,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwOTgyMDMz","number":1953,"title":"Documentation for to_csv, to_pandas and 
to_dict","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-26T16:35:49Z","updated_at":"2021-03-01T14:03:48Z","closed_at":"2021-03-01T14:03:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1953","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1953","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1953.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1953.patch","merged_at":"2021-03-01T14:03:47Z"},"body":"I added these methods to the documentation with a small paragraph.\r\n\r\nI also fixed some formatting issues in the docstrings","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1953\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1953\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1952","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1952\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1952\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1952\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1952","id":817428160,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwOTIyNjQw","number":1952,"title":"Handle 
timeouts","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-26T15:02:07Z","updated_at":"2021-03-01T14:29:24Z","closed_at":"2021-03-01T14:29:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1952","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1952","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1952.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1952.patch","merged_at":"2021-03-01T14:29:24Z"},"body":"As noticed in https:\/\/github.com\/huggingface\/datasets\/issues\/1939, timeouts were not properly handled when loading a dataset.\r\nThis caused the connection to hang indefinitely when working in a firewalled environment cc @stas00 \r\n\r\nI added a default timeout, and included an option to our offline environment for tests to be able to simulate both connection errors and timeout errors (previously it was simulating connection errors only).\r\n\r\nNow networks calls don't hang indefinitely.\r\nThe default timeout is set to 10sec (we might reduce it).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1952\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1952\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1951","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1951\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1951\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1951\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1951","id":817423573,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwOTE4ODE2","number":1951,"title":"Add cross-platform support for 
datasets-cli","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-26T14:56:25Z","updated_at":"2021-03-11T02:18:26Z","closed_at":"2021-02-26T15:30:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1951","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1951","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1951.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1951.patch","merged_at":"2021-02-26T15:30:26Z"},"body":"One thing I've noticed while going through the codebase is the usage of `scripts` in `setup.py`. This [answer](https:\/\/stackoverflow.com\/a\/28119736\/14095927) on SO explains it nicely why it's better to use `entry_points` instead of `scripts`. To add cross-platform support to the CLI, this PR replaces `scripts` with `entry_points` in `setup.py` and moves datasets-cli to src\/datasets\/commands\/datasets_cli.py. All *.md and *.rst files are updated accordingly. 
The same changes were made in the transformers repo to add cross-platform ([link to PR](https:\/\/github.com\/huggingface\/transformers\/pull\/4131)).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1951\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1951\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1950","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1950\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1950\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1950\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1950","id":817295235,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwODExMjMz","number":1950,"title":"updated multi_nli dataset with missing fields","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-26T11:54:36Z","updated_at":"2021-03-01T11:08:30Z","closed_at":"2021-03-01T11:08:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1950","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1950","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1950.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1950.patch","merged_at":"2021-03-01T11:08:29Z"},"body":"1) updated fields which were missing earlier\r\n2) added tags to README\r\n3) updated a few fields of README \r\n4) new dataset_infos.json and dummy files","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1950\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1950\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1949","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1949\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1949\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1949\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1949","id":816986936,"node_id":"MDU6SXNzdWU4MTY5ODY5MzY=","number":1949,"title":"Enable Fast Filtering using Arrow Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-26T02:53:37Z","updated_at":"2021-02-26T19:18:29Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi @lhoestq,\r\n\r\nAs mentioned in Issue #1796, I would love to work on enabling fast filtering\/mapping. Can you please share the expectations? It would be great if you could point me to the relevant methods\/files involved. Or the docs or maybe an overview of `arrow_dataset.py`. 
I only ask this because I am having trouble getting started ;-;\r\n\r\nAny help would be appreciated.\r\n\r\nThanks,\r\nGunjan","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1949\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1949\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1948","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1948\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1948\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1948\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1948","id":816689329,"node_id":"MDU6SXNzdWU4MTY2ODkzMjk=","number":1948,"title":"dataset loading logger level","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-25T18:33:37Z","updated_at":"2022-02-08T12:47:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"on master I get this with `--dataset_name wmt16 --dataset_config ro-en`:\r\n\r\n```\r\nWARNING:datasets.arrow_dataset:Loading cached processed dataset at \/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/9dc00622c30446e99c4c63d12a484ea4fb653f2f37c867d6edcec839d7eae50f\/cache-2e01bead8cf42e26.arrow\r\nWARNING:datasets.arrow_dataset:Loading cached processed dataset at \/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/9dc00622c30446e99c4c63d12a484ea4fb653f2f37c867d6edcec839d7eae50f\/cache-ac3bebaf4f91f776.arrow\r\nWARNING:datasets.arrow_dataset:Loading cached processed dataset at \/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/9dc00622c30446e99c4c63d12a484ea4fb653f2f37c867d6edcec839d7eae50f\/cache-810c3e61259d73a9.arrow\r\n```\r\n\r\nwhy are those WARNINGs? 
Should be INFO, no?\r\n\r\nwarnings should only be used when a user needs to pay attention to something, this is just informative - I'd even say it should be DEBUG, but definitely not WARNING.\r\n\r\nThank you.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1948\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1948\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1947","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1947\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1947\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1947\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1947","id":816590299,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwMjI2MDk5","number":1947,"title":"Update documentation with not in place transforms and update DatasetDict","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-25T16:23:18Z","updated_at":"2021-03-01T14:36:54Z","closed_at":"2021-03-01T14:36:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1947","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1947","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1947.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1947.patch","merged_at":"2021-03-01T14:36:53Z"},"body":"In #1883 were added the not in-place transforms `flatten`, `remove_columns`, `rename_column` and `cast`.\r\n\r\nI added them to the documentation and added a paragraph on how to use them\r\n\r\nYou can preview the documentation [here](https:\/\/28862-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/processing.html#renaming-removing-casting-and-flattening-columns)\r\n\r\nI also added these methods to the DatasetDict 
class.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1947\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1947\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1946","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1946\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1946\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1946\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1946","id":816526294,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwMTcyNzI2","number":1946,"title":"Implement Dataset from CSV","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-25T15:10:13Z","updated_at":"2021-03-12T09:42:48Z","closed_at":"2021-03-12T09:42:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1946","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1946","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1946.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1946.patch","merged_at":"2021-03-12T09:42:48Z"},"body":"Implement `Dataset.from_csv`.\r\n\r\nAnalogue to #1943.\r\n\r\nIf finally, the scripts should be used instead, at least we can reuse the tests here. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1946\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1946\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1945","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1945\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1945\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1945\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1945","id":816421966,"node_id":"MDU6SXNzdWU4MTY0MjE5NjY=","number":1945,"title":"AttributeError: 'DatasetDict' object has no attribute 'concatenate_datasets'","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-25T13:09:45Z","updated_at":"2021-02-25T13:20:35Z","closed_at":"2021-02-25T13:20:26Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am trying to concatenate a list of huggingface datastes as:\r\n\r\n` train_dataset = datasets.concatenate_datasets(train_datasets)\r\n`\r\nHere is the `train_datasets` when I print:\r\n\r\n```\r\n[Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 120361\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 2670\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 6944\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 38140\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 173711\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 1655\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 4274\r\n}), Dataset({\r\n features: ['attention_mask', 
'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 2019\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 2109\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 11963\r\n})]\r\n```\r\n\r\nI am getting the following error:\r\n\r\n`AttributeError: 'DatasetDict' object has no attribute 'concatenate_datasets'\r\n`\r\n\r\nI was wondering if you could help me with this issue, thanks a lot ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1945\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1945\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1944","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1944\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1944\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1944\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1944","id":816267216,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc5OTU2Nzc3","number":1944,"title":"Add Turkish News Category Dataset (270K - Lite Version)","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-25T09:45:22Z","updated_at":"2021-03-02T17:46:41Z","closed_at":"2021-03-01T18:23:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1944","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1944","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1944.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1944.patch","merged_at":null},"body":"This PR adds the Turkish News Categories Dataset (270K - Lite Version) dataset which is a text classification dataset by me, @basakbuluz and @serdarakyol. 
\r\nThis dataset contains the same news from the current [interpress_news_category_tr dataset](https:\/\/huggingface.co\/datasets\/interpress_news_category_tr) but contains less information, OCR errors are reduced, can be easily separated, and can be divided into 10 classes (\"k\u00fclt\u00fcrsanat\", \"ekonomi\", \"siyaset\", \"e\u011fitim\", \"d\u00fcnya\", \"spor\", \"teknoloji\", \"magazin\", \"sa\u011fl\u0131k\", \"g\u00fcndem\") were rearranged.\r\n\r\n@SBrandeis @lhoestq, can you please review this PR?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1944\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1944\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1943","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1943\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1943\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1943\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1943","id":816160453,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc5ODY5NTk0","number":1943,"title":"Implement Dataset from JSON and JSON Lines","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2021-02-25T07:17:33Z","updated_at":"2021-03-18T09:42:08Z","closed_at":"2021-03-18T09:42:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1943","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1943","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1943.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1943.patch","merged_at":"2021-03-18T09:42:08Z"},"body":"Implement `Dataset.from_jsonl`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1943\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1943\/timeline","performed_via_github_app":null} 
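A likely cause of the `AttributeError` reported in issue #1945 above is that a local variable named `datasets` (a `DatasetDict`) shadows the imported `datasets` module, so the module-level `concatenate_datasets` function gets looked up on the dict instead of the library. A minimal sketch of the working call follows, assuming only the public `datasets` API; the alias and the stand-in data are illustrative:

```python
import datasets as hf_datasets  # alias avoids shadowing by a local `datasets` variable
from datasets import Dataset

# Two small stand-in datasets with identical features.
parts = [
    Dataset.from_dict({"label": [0, 1], "text": ["a", "b"]}),
    Dataset.from_dict({"label": [1, 0], "text": ["c", "d"]}),
]

# concatenate_datasets is a module-level function that takes a list of Dataset
# objects; it is not a method on Dataset or DatasetDict.
combined = hf_datasets.concatenate_datasets(parts)
print(combined.num_rows)  # 4
```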
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1942","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1942\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1942\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1942\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1942","id":816037520,"node_id":"MDU6SXNzdWU4MTYwMzc1MjA=","number":1942,"title":"[experiment] missing default_experiment-1-0.arrow","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_
admin":false}],"milestone":null,"comments":17,"created_at":"2021-02-25T03:02:15Z","updated_at":"2021-03-01T18:33:31Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"the original report was pretty bad and incomplete - my apologies!\r\n\r\nPlease see the complete version here: https:\/\/github.com\/huggingface\/datasets\/issues\/1942#issuecomment-786336481\r\n\r\n------------\r\n\r\nAs mentioned here https:\/\/github.com\/huggingface\/datasets\/issues\/1939 metrics don't get cached, looking at my local `~\/.cache\/huggingface\/metrics` - there are many `*.arrow.lock` files but zero metrics files.\r\n\r\nw\/o the network I get:\r\n```\r\nFileNotFoundError: [Errno 2] No such file or directory: '~\/.cache\/huggingface\/metrics\/sacrebleu\/default\/default_experiment-1-0.arrow\r\n```\r\nthere is just `~\/.cache\/huggingface\/metrics\/sacrebleu\/default\/default_experiment-1-0.arrow.lock`\r\n\r\nI did run the same `run_seq2seq.py` script on the instance with network and it worked just fine, but only the lock file was left behind.\r\n\r\nthis is with master.\r\n\r\nThank you.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1942\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1942\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1941","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1941\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1941\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1941\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1941","id":815985167,"node_id":"MDU6SXNzdWU4MTU5ODUxNjc=","number":1941,"title":"Loading of FAISS index fails for index_name = 
'exact'","user":{"login":"mkserge","id":2992022,"node_id":"MDQ6VXNlcjI5OTIwMjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2992022?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mkserge","html_url":"https:\/\/github.com\/mkserge","followers_url":"https:\/\/api.github.com\/users\/mkserge\/followers","following_url":"https:\/\/api.github.com\/users\/mkserge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mkserge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mkserge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mkserge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mkserge\/orgs","repos_url":"https:\/\/api.github.com\/users\/mkserge\/repos","events_url":"https:\/\/api.github.com\/users\/mkserge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mkserge\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-02-25T01:30:54Z","updated_at":"2021-02-25T14:28:46Z","closed_at":"2021-02-25T14:28:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nIt looks like loading of FAISS index now fails when using index_name = 'exact'.\r\n\r\nFor example, from the RAG [model card](https:\/\/huggingface.co\/facebook\/rag-token-nq?fbclid=IwAR3bTfhls5U_t9DqsX2Vzb7NhtRHxJxfQ-uwFT7VuCPMZUM2AdAlKF_qkI8#usage).\r\n\r\nRunning `transformers==4.3.2` and datasets installed from source on latest `master` 
branch.\r\n\r\n```bash\r\n(venv) sergey_mkrtchyan datasets (master) $ python\r\nPython 3.8.6 (v3.8.6:db455296be, Sep 23 2020, 13:31:39)\r\n[Clang 6.0 (clang-600.0.57)] on darwin\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n>>> from transformers import RagTokenizer, RagRetriever, RagTokenForGeneration\r\n>>> tokenizer = RagTokenizer.from_pretrained(\"facebook\/rag-token-nq\")\r\n>>> retriever = RagRetriever.from_pretrained(\"facebook\/rag-token-nq\", index_name=\"exact\", use_dummy_dataset=True)\r\nUsing custom data configuration dummy.psgs_w100.nq.no_index-dummy=True,with_index=False\r\nReusing dataset wiki_dpr (\/Users\/sergey_mkrtchyan\/.cache\/huggingface\/datasets\/wiki_dpr\/dummy.psgs_w100.nq.no_index-dummy=True,with_index=False\/0.0.0\/8a97e0f4fa5bc46e179474db6a61b09d5d2419d2911835bd3f91d110c936d8bb)\r\nUsing custom data configuration dummy.psgs_w100.nq.exact-50b6cda57ff32ab4\r\nReusing dataset wiki_dpr (\/Users\/sergey_mkrtchyan\/.cache\/huggingface\/datasets\/wiki_dpr\/dummy.psgs_w100.nq.exact-50b6cda57ff32ab4\/0.0.0\/8a97e0f4fa5bc46e179474db6a61b09d5d2419d2911835bd3f91d110c936d8bb)\r\n 0%| | 0\/10 [00:00<?, ?it\/s]\r\nTraceback (most recent call last):\r\n File \"<stdin>\", line 1, in <module>\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/transformers\/models\/rag\/retrieval_rag.py\", line 425, in from_pretrained\r\n return cls(\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/transformers\/models\/rag\/retrieval_rag.py\", line 387, in __init__\r\n self.init_retrieval()\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/transformers\/models\/rag\/retrieval_rag.py\", line 458, in init_retrieval\r\n self.index.init_index()\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/transformers\/models\/rag\/retrieval_rag.py\", line 284, in init_index\r\n self.dataset = load_dataset(\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/load.py\", line 750, in load_dataset\r\n ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications, in_memory=keep_in_memory)\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/builder.py\", line 734, in as_dataset\r\n datasets = utils.map_nested(\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/utils\/py_utils.py\", line 195, in map_nested\r\n return function(data_struct)\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/builder.py\", line 769, in _build_single_dataset\r\n post_processed = self._post_process(ds, resources_paths)\r\n File \"\/Users\/sergey_mkrtchyan\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wiki_dpr\/8a97e0f4fa5bc46e179474db6a61b09d5d2419d2911835bd3f91d110c936d8bb\/wiki_dpr.py\", line 205, in _post_process\r\n dataset.add_faiss_index(\"embeddings\", custom_index=index)\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/arrow_dataset.py\", line 2516, in add_faiss_index\r\n super().add_faiss_index(\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/search.py\", line 416, in add_faiss_index\r\n faiss_index.add_vectors(self, column=column, train_size=train_size, faiss_verbose=faiss_verbose)\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/search.py\", line 281, in add_vectors\r\n 
self.faiss_index.add(vecs)\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/faiss\/__init__.py\", line 104, in replacement_add\r\n self.add_c(n, swig_ptr(x))\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/faiss\/swigfaiss.py\", line 3263, in add\r\n return _swigfaiss.IndexHNSW_add(self, n, x)\r\nRuntimeError: Error in virtual void faiss::IndexHNSW::add(faiss::Index::idx_t, const float *) at \/Users\/runner\/work\/faiss-wheels\/faiss-wheels\/faiss\/faiss\/IndexHNSW.cpp:356: Error: 'is_trained' failed\r\n>>>\r\n```\r\n\r\nThe issue seems to be related to the scalar quantization in faiss added in this commit: 8c5220307c33f00e01c3bf7b8. Reverting it fixes the issue.\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1941\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1941\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1940","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1940\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1940\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1940\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1940","id":815770012,"node_id":"MDU6SXNzdWU4MTU3NzAwMTI=","number":1940,"title":"Side effect when filtering data due to `does_function_return_dict` call in `Dataset.map()`","user":{"login":"francisco-perez-sorrosal","id":918006,"node_id":"MDQ6VXNlcjkxODAwNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/918006?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal","html_url":"https:\/\/github.com\/francisco-perez-sorrosal","followers_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/followers","following_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/orgs","repos_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/repos","events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-24T19:18:56Z","updated_at":"2021-03-23T15:26:49Z","closed_at":"2021-03-23T15:26:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi there!\r\n\r\nIn my codebase I have a function to filter 
rows in a dataset, selecting only a certain number of examples per class. The function passes a extra argument to maintain a counter of the number of dataset rows\/examples already selected per each class, which are the ones I want to keep in the end:\r\n\r\n```python\r\n def fill_train_examples_per_class(example, per_class_limit: int, counter: collections.Counter):\r\n label = int(example['label'])\r\n current_counter = counter.get(label, 0)\r\n if current_counter < per_class_limit:\r\n counter[label] = current_counter + 1\r\n return True\r\n return False\r\n```\r\n\r\nAt some point I invoke it through the `Dataset.filter()` method in the `arrow_dataset.py` module like this:\r\n\r\n```python\r\n...\r\nkwargs = {\"per_class_limit\": train_examples_per_class_limit, \"counter\": Counter()}\r\ndatasets['train'] = datasets['train'].filter(fill_train_examples_per_class, num_proc=1, fn_kwargs=kwargs)\r\n...\r\n```\r\n\r\nThe problem is that, passing a stateful container (the counter,) provokes a side effect in the new filtered dataset obtained. This is due to the fact that at some point in `filter()`, the `map()`'s function `does_function_return_dict` is invoked in line [1290](https:\/\/github.com\/huggingface\/datasets\/blob\/96578adface7e4bc1f3e8bafbac920d72ca1ca60\/src\/datasets\/arrow_dataset.py#L1290). \r\n\r\nWhen this occurs, the state of the counter is initially modified by the effects of the function call on the 1 or 2 rows selected in lines 1288 and 1289 of the same file (which are marked as `test_inputs` & `test_indices` respectively in lines 1288 and 1289. This happens out of the control of the user (which for example can't reset the state of the counter before continuing the execution,) provoking in the end an undesired side effect in the results obtained. \r\n\r\nIn my case, the resulting dataset -despite of the counter results are ok- lacks an instance of the classes 0 and 1 (which happen to be the classes of the first two examples of my dataset.) The rest of the classes I have in my dataset, contain the right number of examples as they were not affected by the effects of `does_function_return_dict` call.\r\n\r\nI've debugged my code extensively and made a workaround myself hardcoding the necessary stuff (basically putting `update_data=True` in line 1290,) and then I obtain the results I expected without the side effect.\r\n\r\nIs there a way to avoid that call to `does_function_return_dict` in map()'s line 1290 ? (e.g. 
extracting the required information that `does_function_return_dict` returns without making the testing calls to the user function on dataset rows 0 & 1) \r\n\r\nThanks in advance,\r\n\r\nFrancisco Perez-Sorrosal\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1940\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1940\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1939","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1939\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1939\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1939\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1939","id":815680510,"node_id":"MDU6SXNzdWU4MTU2ODA1MTA=","number":1939,"title":"[firewalled env] OFFLINE mode","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"
https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2021-02-24T17:13:42Z","updated_at":"2021-03-05T05:09:54Z","closed_at":"2021-03-05T05:09:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"This issue comes from a need to be able to run `datasets` in a firewalled env, which currently makes the software hang until it times out, as it's unable to complete the network calls.\r\n\r\nI propose the following approach to solving this problem, using the example of `run_seq2seq.py` as a sample program. There are 2 possible ways to going about it.\r\n\r\n## 1. Manual\r\n\r\nmanually prepare data and metrics files, that is transfer to the firewalled instance the dataset and the metrics and run:\r\n\r\n```\r\nDATASETS_OFFLINE=1 run_seq2seq.py --train_file xyz.csv --validation_file xyz.csv ...\r\n```\r\n\r\n`datasets` must not make any network calls and if there is a logic to do that and something is missing it should assert that this or that action requires network and therefore it can't proceed.\r\n\r\n## 2. Automatic\r\n\r\nIn some clouds one can prepare a datastorage ahead of time with a normal networked environment but which doesn't have gpus and then one switches to the gpu instance which is firewalled, but it can access all the cached data. This is the ideal situation, since in this scenario we don't have to do anything manually, but simply run the same application twice:\r\n\r\n1. on the non-firewalled instance:\r\n```\r\nrun_seq2seq.py --dataset_name wmt16 --dataset_config ro-en ...\r\n```\r\n\r\nwhich should download and cached everything.\r\n\r\n2. and then immediately after on the firewalled instance, which shares the same filesystem\r\n```\r\nDATASETS_OFFLINE=1 run_seq2seq.py --dataset_name wmt16 --dataset_config ro-en ...\r\n```\r\n\r\nand the metrics and datasets should be cached by the invocation number 1 and any network calls be skipped and if the logic is missing data it should assert and not try to fetch any data from online.\r\n\r\n## Common Issues\r\n\r\n1. for example currently `datasets` tries to look up online datasets if the files contain json or csv, despite the paths already provided\r\n\r\n```\r\n if dataset and path in _PACKAGED_DATASETS_MODULES:\r\n```\r\n\r\n2. it has an issue with metrics. e.g. I had to manually copy `rouge\/rouge.py` from the `datasets` repo to the current dir - or it was hanging.\r\n\r\nI had to comment out `head_hf_s3(...)` calls to make things work. 
So all those `try: head_hf_s3(...)` shouldn't be tried with `DATASETS_OFFLINE=1`\r\n\r\nHere is the corresponding issue for `transformers`: https:\/\/github.com\/huggingface\/transformers\/issues\/10379\r\n\r\nThanks.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1939\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1939\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1938","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1938\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1938\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1938\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1938","id":815647774,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc5NDQyNDkw","number":1938,"title":"Disallow ClassLabel with no names","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-24T16:37:57Z","updated_at":"2021-02-25T11:27:29Z","closed_at":"2021-02-25T11:27:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1938","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1938","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1938.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1938.patch","merged_at":"2021-02-25T11:27:29Z"},"body":"It was possible to create a ClassLabel without specifying the names or the number of classes.\r\nThis was causing silent issues as in #1936 and breaking the conversion methods str2int and int2str.\r\n\r\ncc @justin-yan ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1938\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1938\/timeline","performed_via_github_app":null} 
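PR #1938 above disallows creating a `ClassLabel` with neither `names` nor `num_classes`, since the `str2int`/`int2str` conversions need a known label set. A short sketch of the intended usage, assuming only the public `datasets.ClassLabel` API:

```python
from datasets import ClassLabel

# A ClassLabel needs its label vocabulary (or at least a class count) up front.
labels = ClassLabel(names=["negative", "positive"])  # or ClassLabel(num_classes=2)

print(labels.str2int("positive"))  # 1
print(labels.int2str(0))           # "negative"

# ClassLabel() with no names and no num_classes is the silent failure mode
# that PR #1938 turns into an explicit error.
```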
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1937","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1937\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1937\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1937\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1937","id":815163943,"node_id":"MDU6SXNzdWU4MTUxNjM5NDM=","number":1937,"title":"CommonGen dataset page shows an error OSError: [Errno 28] No space left on device","user":{"login":"yuchenlin","id":10104354,"node_id":"MDQ6VXNlcjEwMTA0MzU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10104354?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yuchenlin","html_url":"https:\/\/github.com\/yuchenlin","followers_url":"https:\/\/api.github.com\/users\/yuchenlin\/followers","following_url":"https:\/\/api.github.com\/users\/yuchenlin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yuchenlin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yuchenlin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yuchenlin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yuchenlin\/orgs","repos_url":"https:\/\/api.github.com\/users\/yuchenlin\/repos","events_url":"https:\/\/api.github.com\/users\/yuchenlin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yuchenlin\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-24T06:47:33Z","updated_at":"2021-02-26T11:10:06Z","closed_at":"2021-02-26T11:10:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The page of the CommonGen data https:\/\/huggingface.co\/datasets\/viewer\/?dataset=common_gen shows \r\n![image](https:\/\/user-images.githubusercontent.com\/10104354\/108959311-1865e600-7629-11eb-868c-cf4cb27034ea.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1937\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1937\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1936","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1936\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1936\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1936\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1936","id":814726512,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4NjY3NTQ4","number":1936,"title":"[WIP] Adding Support for Reading Pandas 
Category","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-02-23T18:32:54Z","updated_at":"2021-03-09T07:09:05Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1936","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1936","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1936.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1936.patch","merged_at":null},"body":"@lhoestq - continuing our conversation from https:\/\/github.com\/huggingface\/datasets\/issues\/1906#issuecomment-784247014\r\n\r\nThe goal of this PR is to support `Dataset.from_pandas(df)` where the dataframe contains a Category.\r\n\r\nJust the 4 line change below actually does seem to work:\r\n\r\n```\r\n>>> from datasets import Dataset\r\n>>> import pandas as pd\r\n>>> df = pd.DataFrame(pd.Series([\"a\", \"b\", \"c\", \"a\"], dtype=\"category\"))\r\n>>> ds = Dataset.from_pandas(df)\r\n>>> ds.to_pandas()\r\n 0\r\n0 a\r\n1 b\r\n2 c\r\n3 a\r\n>>> ds.to_pandas().dtypes\r\n0 category\r\ndtype: object\r\n```\r\n\r\nsave_to_disk, etc. all seem to work as well. The main things that are theoretically \"incorrect\" if we leave this are:\r\n\r\n```\r\n>>> ds.features.type\r\nStructType(struct<0: int64>)\r\n```\r\nthere are a decent number of references to this property in the library, but I can't find anything that seems to actually break as a result of this being int64 vs. dictionary? I think the gist of my question is: a) do we *need* to change the dtype of Classlabel and have get_nested_type return a pyarrow.DictionaryType instead of int64? and b) do you *want* it to change? The biggest challenge I see to implementing this correctly is that the data will need to be passed in along with the pyarrow schema when instantiating the Classlabel (I *think* this is unavoidable, since the type itself doesn't contain the actual label values) which could be a fairly intrusive change - e.g. `from_arrow_schema`'s interface would need to change to include optional arrow data? 
Once we start going down this path of modifying the public interfaces I am admittedly feeling a little bit outside of my comfort zone\r\n\r\nAdditionally I think `int2str`, `str2int`, and `encode_example` probably won't work - but I can't find any usages of them in the library itself.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1936\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1936\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1935","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1935\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1935\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1935\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1935","id":814623827,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4NTgyMzk1","number":1935,"title":"add CoVoST2","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-23T16:28:16Z","updated_at":"2021-02-24T18:09:32Z","closed_at":"2021-02-24T18:05:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1935","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1935","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1935.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1935.patch","merged_at":"2021-02-24T18:05:09Z"},"body":"This PR adds the CoVoST2 dataset for speech translation and ASR.\r\nhttps:\/\/github.com\/facebookresearch\/covost#covost-2\r\n\r\nThe dataset requires manual download as the download page requests an email address and the URLs are temporary.\r\n\r\nThe dummy data is a bit bigger because of the mp3 files and 36 configs.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1935\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1935\/timeline","performed_via_github_app":null} 
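The exchange in PR #1936 above is easier to follow with the round trip written out. This is a minimal sketch based on the snippet in the PR body; the column name is illustrative, the printed results are as reported there, and behaviour may differ in later `datasets` versions:

```python
import pandas as pd
from datasets import Dataset

# A DataFrame with a pandas Categorical column, as in the PR discussion.
df = pd.DataFrame({"col": pd.Series(["a", "b", "c", "a"], dtype="category")})

ds = Dataset.from_pandas(df)

# The values round-trip through Arrow and back to pandas (reported as dtype
# "category" in the PR body)...
print(ds.to_pandas()["col"])

# ...but the declared feature type is the open question in the PR: at the time
# it was reported as the underlying int64 storage type rather than a
# dictionary/ClassLabel-style type.
print(ds.features)
```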
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1934","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1934\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1934\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1934\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1934","id":814437190,"node_id":"MDU6SXNzdWU4MTQ0MzcxOTA=","number":1934,"title":"Add Stanford Sentiment Treebank (SST)","user":{"login":"patpizio","id":15801338,"node_id":"MDQ6VXNlcjE1ODAxMzM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15801338?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patpizio","html_url":"https:\/\/github.com\/patpizio","followers_url":"https:\/\/api.github.com\/users\/patpizio\/followers","following_url":"https:\/\/api.github.com\/users\/patpizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patpizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patpizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patpizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patpizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/patpizio\/repos","events_url":"https:\/\/api.github.com\/users\/patpizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patpizio\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-23T12:53:16Z","updated_at":"2021-03-18T17:51:44Z","closed_at":"2021-03-18T17:51:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am going to add SST:\r\n\r\n- **Name:** The Stanford Sentiment Treebank\r\n- **Description:** The first corpus with fully labeled parse trees that allows for a complete analysis of the compositional effects of sentiment in language\r\n- **Paper:** [Recursive Deep Models for Semantic Compositionality Over a Sentiment Treebank](https:\/\/nlp.stanford.edu\/~socherr\/EMNLP2013_RNTN.pdf)\r\n- **Data:** https:\/\/nlp.stanford.edu\/sentiment\/index.html\r\n- **Motivation:** Already requested in #353, SST is a popular dataset for Sentiment Classification\r\n\r\nWhat's the difference with the [_SST-2_](https:\/\/huggingface.co\/datasets\/viewer\/?dataset=glue&config=sst2) dataset included in GLUE? Essentially, SST-2 is a version of SST where:\r\n- the labels were mapped from real numbers in [0.0, 1.0] to a binary label: {0, 1}\r\n- the labels of the *sub-sentences* were included only in the training set\r\n- the labels in the test set are obfuscated\r\n\r\nSo there is a lot more information in the original SST. The tricky bit is, the data is scattered into many text files and, for one in particular, I couldn't find the original encoding ([*but I'm not the only one*](https:\/\/groups.google.com\/g\/word2vec-toolkit\/c\/QIUjLw6RqFk\/m\/_iEeyt428wkJ) \ud83c\udfb5). 
The only solution I found was to manually replace all the \u00e8, \u00eb, \u00e7 and so on into an `utf-8` copy of the text file. I uploaded the result in my Dropbox and I am using that as the main repo for the dataset.\r\n\r\nAlso, the _sub-sentences_ are built at run-time from the information encoded in several text files, so generating the examples is a bit more cumbersome than usual. Luckily, the dataset is not enormous.\r\n\r\nI plan to divide the dataset in 2 configs: one with just whole sentences with their labels, the other with sentences _and their sub-sentences_ with their labels. Each config will be split in train, validation and test. Hopefully this makes sense, we may discuss it in the PR I'm going to submit.\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1934\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1934\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1933","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1933\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1933\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1933\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1933","id":814335846,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4MzQwMzk3","number":1933,"title":"Use arrow ipc file format","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-23T10:38:24Z","updated_at":"2021-02-23T10:38:24Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1933","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1933","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1933.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1933.patch","merged_at":null},"body":"According to the [documentation](https:\/\/arrow.apache.org\/docs\/format\/Columnar.html?highlight=arrow1#ipc-file-format), it's identical to the streaming format except that it contains the memory offsets of each sample:\r\n\r\n> We define a \u201cfile format\u201d supporting random access that is 
build with the stream format. The file starts and ends with a magic string ARROW1 (plus padding). What follows in the file is identical to the stream format. At the end of the file, we write a footer containing a redundant copy of the schema (which is a part of the streaming format) plus memory offsets and sizes for each of the data blocks in the file. This enables random access any record batch in the file. See File.fbs for the precise details of the file footer.\r\n\r\nSince it stores more metadata regarding the positions of the examples in the file, it should enable better example retrieval performances. However from the discussion in https:\/\/github.com\/huggingface\/datasets\/issues\/1803 it looks like it's not the case unfortunately. Maybe in the future this will allow speed gains.\r\n\r\nI think it's still a good idea to start using it anyway for these reasons:\r\n- in the future we may have speed gains\r\n- it contains the arrow streaming format data\r\n- it's compatible with the pyarrow Dataset implementation (it allows to load remote dataframes for example) if we want to use it in the future\r\n- it's also the format used by arrow feather if we want to use it in the future\r\n- it's roughly the same size as the streaming format\r\n- it's easy to have backward compatibility with the streaming format\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1933\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1933\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1932","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1932\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1932\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1932\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1932","id":814326116,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4MzMyMTQy","number":1932,"title":"Fix builder config creation with 
data_dir","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-23T10:26:02Z","updated_at":"2021-02-23T10:45:28Z","closed_at":"2021-02-23T10:45:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1932","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1932","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1932.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1932.patch","merged_at":"2021-02-23T10:45:27Z"},"body":"The data_dir parameter wasn't taken into account to create the config_id, therefore the resulting builder config was considered not custom. However a builder config that is non-custom must not have a name that collides with the predefined builder config names. Therefore it resulted in a `ValueError(\"Cannot name a custom BuilderConfig the same as an available...\")`\r\n\r\nI fixed that by commenting the line that used to ignore the data_dir when creating the config.\r\n\r\nIt was previously ignored before the introduction of config id because we didn't want to change the config name. 
Now it's fine to take it into account for the config id.\r\n\r\nNow creating a config with a data_dir works again @patrickvonplaten ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1932\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1932\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1931","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1931\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1931\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1931\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1931","id":814225074,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4MjQ4NTA5","number":1931,"title":"add m_lama (multilingual lama) dataset","user":{"login":"pdufter","id":13961899,"node_id":"MDQ6VXNlcjEzOTYxODk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13961899?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pdufter","html_url":"https:\/\/github.com\/pdufter","followers_url":"https:\/\/api.github.com\/users\/pdufter\/followers","following_url":"https:\/\/api.github.com\/users\/pdufter\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pdufter\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pdufter\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pdufter\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pdufter\/orgs","repos_url":"https:\/\/api.github.com\/users\/pdufter\/repos","events_url":"https:\/\/api.github.com\/users\/pdufter\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pdufter\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-23T08:11:57Z","updated_at":"2021-03-01T10:01:03Z","closed_at":"2021-03-01T10:01:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1931","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1931","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1931.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1931.patch","merged_at":"2021-03-01T10:01:03Z"},"body":"Add a multilingual (machine translated and automatically generated) version of the LAMA benchmark. 
For details see the paper https:\/\/arxiv.org\/pdf\/2102.00894.pdf ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1931\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1931\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1930","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1930\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1930\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1930\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1930","id":814055198,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4MTAwNzI0","number":1930,"title":"updated the wino_bias dataset","user":{"login":"JieyuZhao","id":22306304,"node_id":"MDQ6VXNlcjIyMzA2MzA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22306304?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JieyuZhao","html_url":"https:\/\/github.com\/JieyuZhao","followers_url":"https:\/\/api.github.com\/users\/JieyuZhao\/followers","following_url":"https:\/\/api.github.com\/users\/JieyuZhao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JieyuZhao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JieyuZhao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JieyuZhao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JieyuZhao\/orgs","repos_url":"https:\/\/api.github.com\/users\/JieyuZhao\/repos","events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-23T03:07:40Z","updated_at":"2021-04-07T15:24:56Z","closed_at":"2021-04-07T15:24:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1930","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1930","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1930.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1930.patch","merged_at":"2021-04-07T15:24:56Z"},"body":"Updated the wino_bias.py script.\r\n- updated the data_url\r\n- added different configurations for different data splits\r\n- added the coreference_cluster to the data features","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1930\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1930\/timeline","performed_via_github_app":null} 
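PR #1930 above reorganises `wino_bias` into per-split configurations and adds the coreference-cluster feature. A sketch of loading one configuration, assuming a `datasets` version that can still load this dataset and using "type1_anti" as an example config name (the exact names and splits are listed on the dataset card):

```python
from datasets import load_dataset

# "type1_anti" is one example of the split-specific configurations added around
# this PR; check the dataset card for the full list.
wino = load_dataset("wino_bias", "type1_anti")

print(wino)  # shows the splits available for this configuration
example = next(iter(wino.values()))[0]
print(example)  # each example carries the coreference cluster information added here
```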
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1929","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1929\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1929\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1929\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1929","id":813929669,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3OTk1MTE4","number":1929,"title":"Improve typing and style and fix some inconsistencies","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-22T22:47:41Z","updated_at":"2021-02-24T16:16:14Z","closed_at":"2021-02-24T14:03:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1929","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1929","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1929.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1929.patch","merged_at":"2021-02-24T14:03:53Z"},"body":"This PR:\r\n* improves typing (mostly more consistent use of `typing.Optional`)\r\n* `DatasetDict.cleanup_cache_files` now correctly returns a dict \r\n* replaces `dict()` with the corresponding literal\r\n* uses `dict_to_copy.copy()` instead of `dict(dict_to_copy)` for shallow copying","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1929\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1929\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1928","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1928\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1928\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1928\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1928","id":813793434,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3ODgyMDM4","number":1928,"title":"Updating old 
cards","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-22T19:26:04Z","updated_at":"2021-02-23T18:19:25Z","closed_at":"2021-02-23T18:19:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1928","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1928","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1928.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1928.patch","merged_at":"2021-02-23T18:19:25Z"},"body":"Updated the cards for [Allocine](https:\/\/github.com\/mcmillanmajora\/datasets\/tree\/updating-old-cards\/datasets\/allocine), [CNN\/DailyMail](https:\/\/github.com\/mcmillanmajora\/datasets\/tree\/updating-old-cards\/datasets\/cnn_dailymail), and [SNLI](https:\/\/github.com\/mcmillanmajora\/datasets\/tree\/updating-old-cards\/datasets\/snli). For the most part, the information was just rearranged or rephrased, but the social impact statements are new. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1928\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1928\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1927","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1927\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1927\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1927\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1927","id":813768935,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3ODYxODM5","number":1927,"title":"Update README.md","user":{"login":"JieyuZhao","id":22306304,"node_id":"MDQ6VXNlcjIyMzA2MzA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22306304?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JieyuZhao","html_url":"https:\/\/github.com\/JieyuZhao","followers_url":"https:\/\/api.github.com\/users\/JieyuZhao\/followers","following_url":"https:\/\/api.github.com\/users\/JieyuZhao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JieyuZhao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JieyuZhao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JieyuZhao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JieyuZhao\/orgs","repos_url":"https:\/\/api.github.com\/users\/JieyuZhao\/repos","events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-22T18:51:34Z","updated_at":"2021-02-23T10:52:45Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1927","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1927","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1927.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1927.patch","merged_at":null},"body":"Updated the info for the wino_bias dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1927\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1927\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1926","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1926\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1926\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1926\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1926","id":813607994,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3NzI4Mjgy","number":1926,"title":"Fix: Wiki_dpr - add missing scalar 
quantizer","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-22T15:32:05Z","updated_at":"2021-02-22T15:49:54Z","closed_at":"2021-02-22T15:49:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1926","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1926","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1926.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1926.patch","merged_at":"2021-02-22T15:49:53Z"},"body":"All the prebuilt wiki_dpr indexes already use SQ8, I forgot to update the wiki_dpr script after building them. Now it's finally done.\r\n\r\nThe scalar quantizer SQ8 doesn't reduce the performance of the index as shown in retrieval experiments on RAG.\r\nThe quantizer reduces the size of the index a lot but increases index building time.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1926\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1926\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1925","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1925\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1925\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1925\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1925","id":813600902,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3NzIyMzc3","number":1925,"title":"Fix: Wiki_dpr - fix when with_embeddings is False or index_name is 
\"no_index\"","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-22T15:23:46Z","updated_at":"2021-02-25T01:33:48Z","closed_at":"2021-02-22T15:36:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1925","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1925","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1925.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1925.patch","merged_at":"2021-02-22T15:36:07Z"},"body":"Fix the bugs noticed in #1915 \r\n\r\nThere was a bug when `with_embeddings=False` where the configuration name was the same as if `with_embeddings=True`, which led the dataset builder to do bad verifications (for example it used to expect to download the embeddings for `with_embeddings=False`).\r\n\r\nAnother issue was that setting `index_name=\"no_index\"` didn't set `with_index` to False.\r\n\r\nI fixed both of them and added dummy data for those configurations for testing.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1925\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1925\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1924","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1924\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1924\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1924\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1924","id":813599733,"node_id":"MDU6SXNzdWU4MTM1OTk3MzM=","number":1924,"title":"Anonymous Dataset Addition (i.e Anonymous 
PR?)","user":{"login":"PierreColombo","id":22492839,"node_id":"MDQ6VXNlcjIyNDkyODM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22492839?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PierreColombo","html_url":"https:\/\/github.com\/PierreColombo","followers_url":"https:\/\/api.github.com\/users\/PierreColombo\/followers","following_url":"https:\/\/api.github.com\/users\/PierreColombo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PierreColombo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PierreColombo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PierreColombo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PierreColombo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PierreColombo\/repos","events_url":"https:\/\/api.github.com\/users\/PierreColombo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PierreColombo\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-22T15:22:30Z","updated_at":"2021-02-23T18:28:10Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello,\r\nThanks a lot for your librairy.\r\nWe plan to submit a paper on OpenReview using the Anonymous setting. Is it possible to add a new dataset without breaking the anonimity, with a link to the paper ? \r\nCheers \r\n@eusip","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1924\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1924\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1923","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1923\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1923\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1923\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1923","id":813363472,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3NTI0MTU0","number":1923,"title":"Fix save_to_disk with relative 
path","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-22T10:27:19Z","updated_at":"2021-02-22T11:22:44Z","closed_at":"2021-02-22T11:22:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1923","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1923","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1923.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1923.patch","merged_at":"2021-02-22T11:22:43Z"},"body":"As noticed in #1919 and #1920 the target directory was not created using `makedirs` so saving to it raises `FileNotFoundError`. For absolute paths it works but not for the good reason. 
This is because the target path was the same as the temporary path where in-memory data are written as an intermediary step.\r\n\r\nI added the `makedirs` call using `fs.makedirs` in order to support remote filesystems.\r\nI also fixed the issue with the target path being the temporary path.\r\n\r\nI added a test case for relative paths as well for save_to_disk.\r\n\r\nThanks to @M-Salti for reporting and investigating","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1923\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1923\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1922","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1922\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1922\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1922\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1922","id":813140806,"node_id":"MDU6SXNzdWU4MTMxNDA4MDY=","number":1922,"title":"How to update the \"wino_bias\" dataset","user":{"login":"JieyuZhao","id":22306304,"node_id":"MDQ6VXNlcjIyMzA2MzA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22306304?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JieyuZhao","html_url":"https:\/\/github.com\/JieyuZhao","followers_url":"https:\/\/api.github.com\/users\/JieyuZhao\/followers","following_url":"https:\/\/api.github.com\/users\/JieyuZhao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JieyuZhao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JieyuZhao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JieyuZhao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JieyuZhao\/orgs","repos_url":"https:\/\/api.github.com\/users\/JieyuZhao\/repos","events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-22T05:39:39Z","updated_at":"2021-02-22T10:35:59Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi all,\r\n\r\nThanks for the efforts to collect all the datasets! But I think there is a problem with the wino_bias dataset. The current link is not correct. 
How can I update that?\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1922\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1922\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1921","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1921\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1921\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1921\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1921","id":812716042,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3MDEzMDM4","number":1921,"title":"Standardizing datasets dtypes","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-20T22:04:01Z","updated_at":"2021-02-22T09:44:10Z","closed_at":"2021-02-22T09:44:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1921","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1921","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1921.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1921.patch","merged_at":"2021-02-22T09:44:10Z"},"body":"This PR follows up on discussion in #1900 to have an explicit set of basic dtypes for datasets.\r\n\r\nThis moves away from str(pyarrow.DataType) as the method of choice for creating dtypes, favoring an explicit mapping to a list of supported Value dtypes.\r\n\r\nI believe in practice this should be backward compatible, since anyone previously using Value() would only have been able to use dtypes that had an identically named pyarrow factory function, which are all explicitly supported here, with `float32` and `float64` acting as the official datasets dtypes, which resolves the tension between `double` being the pyarrow dtype and `float64` being the pyarrow type factory 
function.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1921\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1921\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1920","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1920\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1920\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1920\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1920","id":812628220,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2OTQ5NzI2","number":1920,"title":"Fix save_to_disk issue","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-20T14:22:39Z","updated_at":"2021-02-22T10:30:11Z","closed_at":"2021-02-22T10:30:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1920","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1920","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1920.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1920.patch","merged_at":null},"body":"Fixes #1919 \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1920\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1920\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1919","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1919\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1919\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1919\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1919","id":812626872,"node_id":"MDU6SXNzdWU4MTI2MjY4NzI=","number":1919,"title":"Failure to save with 
save_to_disk","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-20T14:18:10Z","updated_at":"2021-03-03T17:40:27Z","closed_at":"2021-03-03T17:40:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I try to save a dataset locally using the `save_to_disk` method I get the error:\r\n\r\n```bash\r\nFileNotFoundError: [Errno 2] No such file or directory: '\/content\/squad\/train\/squad-train.arrow'\r\n```\r\n\r\nTo replicate:\r\n\r\n1. Install `datasets` from master\r\n2. Run this code:\r\n\r\n ```python\r\n from datasets import load_dataset\r\n squad = load_dataset(\"squad\") # or any other dataset\r\n squad.save_to_disk(\"squad\") # error here\r\n ```\r\n\r\nThe problem is that the method is not creating a directory with the name `dataset_path` for saving the dataset in (i.e. it's not creating the *train* and *validation* directories in this case). 
After creating the directory the problem resolves.\r\nI'll open a PR soon doing that and linking this issue.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1919\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1919\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1918","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1918\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1918\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1918\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1918","id":812541510,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2ODg2OTQ0","number":1918,"title":"Fix QA4MRE download URLs","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-20T07:32:17Z","updated_at":"2021-02-22T13:35:06Z","closed_at":"2021-02-22T13:35:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1918","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1918","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1918.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1918.patch","merged_at":"2021-02-22T13:35:06Z"},"body":"The URLs in the `dataset_infos` and `README` are correct, only the ones in the download script needed updating.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1918\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1918\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1917","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1917\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1917\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1917\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1917","id":812390178,"node_id":"MDU6SXNzdWU4MTIzOTAxNzg=","number":1917,"title":"UnicodeDecodeError: windows 10 machine","user":{"login":"yosiasz","id":900951,"node_id":"MDQ6VXNlcjkwMDk1MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/900951?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yosiasz","html_url":"https:\/\/github.com\/yosiasz","followers_url":"https:\/\/api.github.com\/users\/yosiasz\/followers","following_url":"https:\/\/api.github.com\/users\/yosiasz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yosiasz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yosiasz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yosiasz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yosiasz\/orgs","repos_url":"https:\/\/api.github.com\/users\/yosiasz\/repos","events_url":"https:\/\/api.github.com\/users\/yosiasz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yosiasz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-19T22:13:05Z","updated_at":"2021-02-19T22:41:11Z","closed_at":"2021-02-19T22:40:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Windows 10\r\nPhp 3.6.8\r\n\r\nwhen running\r\n\r\n```\r\nimport datasets\r\n\r\noscar_am = datasets.load_dataset(\"oscar\", \"unshuffled_deduplicated_am\")\r\nprint(oscar_am[\"train\"][0])\r\n```\r\nI get the following error\r\n\r\n```\r\nfile \"C:\\PYTHON\\3.6.8\\lib\\encodings\\cp1252.py\", line 23, in decode\r\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\r\nUnicodeDecodeError: 'charmap' codec can't decode byte 0x9d in position 58: character maps to \r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1917\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1917\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1916","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1916\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1916\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1916\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1916","id":812291984,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2NjgwNjY5","number":1916,"title":"Remove unused py_utils 
objects","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-19T19:51:25Z","updated_at":"2021-02-22T14:56:56Z","closed_at":"2021-02-22T13:32:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1916","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1916","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1916.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1916.patch","merged_at":"2021-02-22T13:32:49Z"},"body":"Remove unused\/unnecessary py_utils functions\/classes.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1916\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1916\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1915","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1915\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1915\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1915\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1915","id":812229654,"node_id":"MDU6SXNzdWU4MTIyMjk2NTQ=","number":1915,"title":"Unable to download 
`wiki_dpr`","user":{"login":"nitarakad","id":18504534,"node_id":"MDQ6VXNlcjE4NTA0NTM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18504534?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nitarakad","html_url":"https:\/\/github.com\/nitarakad","followers_url":"https:\/\/api.github.com\/users\/nitarakad\/followers","following_url":"https:\/\/api.github.com\/users\/nitarakad\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nitarakad\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nitarakad\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nitarakad\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nitarakad\/orgs","repos_url":"https:\/\/api.github.com\/users\/nitarakad\/repos","events_url":"https:\/\/api.github.com\/users\/nitarakad\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nitarakad\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-02-19T18:11:32Z","updated_at":"2021-03-03T17:40:48Z","closed_at":"2021-03-03T17:40:48Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to download the `wiki_dpr` dataset. Specifically, I want to download `psgs_w100.multiset.no_index` with no embeddings\/no index. 
In order to do so, I ran:\r\n\r\n`curr_dataset = load_dataset(\"wiki_dpr\", embeddings_name=\"multiset\", index_name=\"no_index\")` \r\n\r\nHowever, I got the following error:\r\n`datasets.utils.info_utils.UnexpectedDownloadedFile: {'embeddings_index'}`\r\n\r\nI tried adding in flags `with_embeddings=False` and `with_index=False`:\r\n\r\n`curr_dataset = load_dataset(\"wiki_dpr\", with_embeddings=False, with_index=False, embeddings_name=\"multiset\", index_name=\"no_index\")`\r\n\r\nBut I got the following error:\r\n`raise ExpectedMoreDownloadedFiles(str(set(expected_checksums) - set(recorded_checksums)))\r\ndatasets.utils.info_utils.ExpectedMoreDownloadedFiles: {\u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_5\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_15\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_30\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_36\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_18\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_41\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_13\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_48\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_10\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_23\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_14\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_34\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_43\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_40\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_47\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_3\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_24\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_7\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_33\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_46\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_42\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_27\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_29\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_26\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_22\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_4\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_20\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_39\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_6\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_16\u2019, 
\u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_8\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_35\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_49\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_17\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_25\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_0\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_38\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_12\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_44\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_1\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_32\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_19\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_31\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_37\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_9\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_11\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_21\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_28\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_45\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_2\u2019}`\r\n\r\nIs there anything else I need to set to download the dataset?\r\n\r\n**UPDATE**: just running `curr_dataset = load_dataset(\"wiki_dpr\", with_embeddings=False, with_index=False)` gives me the same error.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1915\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1915\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1914","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1914\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1914\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1914\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1914","id":812149201,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2NTYyNTkz","number":1914,"title":"Fix logging imports and make all datasets use library 
logger","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-19T16:12:34Z","updated_at":"2021-02-21T19:48:03Z","closed_at":"2021-02-21T19:48:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1914","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1914","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1914.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1914.patch","merged_at":"2021-02-21T19:48:03Z"},"body":"Fix library relative logging imports and make all datasets use library logger.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1914\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1914\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1913","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1913\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1913\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1913\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1913","id":812127307,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2NTQ0NjQw","number":1913,"title":"Add keep_linebreaks parameter to text 
loader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-19T15:43:45Z","updated_at":"2021-02-19T18:36:12Z","closed_at":"2021-02-19T18:36:11Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1913","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1913","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1913.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1913.patch","merged_at":"2021-02-19T18:36:11Z"},"body":"As asked in #870 and https:\/\/github.com\/huggingface\/transformers\/issues\/10269 there should be a parameter to keep the linebreaks when loading a text dataset.\r\ncc @sgugger @jncasey","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1913\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1913\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1912","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1912\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1912\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1912\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1912","id":812034140,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2NDY2ODQx","number":1912,"title":"Update: WMT - use mirror 
links","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-19T13:42:34Z","updated_at":"2021-02-24T13:44:53Z","closed_at":"2021-02-24T13:44:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1912","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1912","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1912.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1912.patch","merged_at":"2021-02-24T13:44:53Z"},"body":"As asked in #1892 I created mirrors of the data hosted on statmt.org and updated the wmt scripts.\r\nNow downloading the wmt datasets is blazing fast :)\r\n\r\ncc @stas00 @patrickvonplaten ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1912\/reactions","total_count":4,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":4,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1912\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1911","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1911\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1911\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1911\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1911","id":812009956,"node_id":"MDU6SXNzdWU4MTIwMDk5NTY=","number":1911,"title":"Saving processed dataset running 
infinitely","user":{"login":"ayubSubhaniya","id":20911334,"node_id":"MDQ6VXNlcjIwOTExMzM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20911334?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ayubSubhaniya","html_url":"https:\/\/github.com\/ayubSubhaniya","followers_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/followers","following_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/orgs","repos_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/repos","events_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-02-19T13:09:19Z","updated_at":"2021-02-23T07:34:44Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I have a text dataset of size 220M.\r\n\r\nFor pre-processing, I need to tokenize this and filter rows with the large sequence.\r\n\r\nMy tokenization took roughly 3hrs. I used map() with batch size 1024 and multi-process with 96 processes.\r\n\r\nfilter() function was way to slow, so I used a hack to use pyarrow filter table function, which is damm fast. Mentioned [here](https:\/\/github.com\/huggingface\/datasets\/issues\/1796)\r\n\r\n```dataset._data = dataset._data.filter(...)```\r\nIt took 1 hr for the filter.\r\n\r\nThen i use `save_to_disk()` on processed dataset and it is running forever.\r\n\r\nI have been waiting since 8 hrs, it has not written a single byte. \r\n\r\nInfact it has actually read from disk more than 100GB, screenshot below shows the stats using `iotop`. 
\r\nSecond process is the one.\r\n\"Screenshot\r\n\r\n\r\nI am not able to figure out, whether this is some issue with dataset library or that it is due to my hack for filter() function.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1911\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1911\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1910","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1910\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1910\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1910\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1910","id":811697108,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2MTg0MDQ3","number":1910,"title":"Adding CoNLLpp dataset.","user":{"login":"ZihanWangKi","id":21319243,"node_id":"MDQ6VXNlcjIxMzE5MjQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21319243?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZihanWangKi","html_url":"https:\/\/github.com\/ZihanWangKi","followers_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/followers","following_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/repos","events_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-19T05:12:30Z","updated_at":"2021-03-04T22:02:47Z","closed_at":"2021-03-04T22:02:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1910","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1910","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1910.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1910.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1910\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1910\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1907","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1907\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1907\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1907\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1907","id":811520569,"node_id":"MDU6SXNzdWU4MTE1MjA1Njk=","number":1907,"title":"DBPedia14 Dataset Checksum bug?","user":{"login":"francisco-perez-sorrosal","id":918006,"node_id":"MDQ6VXNlcjkxODAwNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/918006?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal","html_url":"https:\/\/github.com\/francisco-perez-sorrosal","followers_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/followers","following_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/orgs","repos_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/repos","events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-18T22:25:48Z","updated_at":"2021-02-22T23:22:05Z","closed_at":"2021-02-22T23:22:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi there!!!\r\n\r\nI've been using successfully the DBPedia dataset (https:\/\/huggingface.co\/datasets\/dbpedia_14) with my codebase in the last couple of weeks, but in the last couple of days now I get this error:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \".\/conditional_classification\/basic_pipeline.py\", line 178, in \r\n main()\r\n File \".\/conditional_classification\/basic_pipeline.py\", line 128, in main\r\n corpus.load_data(limit_train_examples_per_class=args.data_args.train_examples_per_class,\r\n File \"\/home\/fp\/dev\/conditional_classification\/conditional_classification\/datasets_base.py\", line 83, in load_data\r\n datasets = load_dataset(self.name, split=dataset_split)\r\n File \"\/home\/fp\/anaconda3\/envs\/conditional\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 609, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/fp\/anaconda3\/envs\/conditional\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 526, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/fp\/anaconda3\/envs\/conditional\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 586, in _download_and_prepare\r\n verify_checksums(\r\n File \"\/home\/fp\/anaconda3\/envs\/conditional\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 39, in verify_checksums\r\n raise NonMatchingChecksumError(error_msg + 
str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/drive.google.com\/uc?export=download&id=0Bz8a_Dbh9QhbQ2Vic1kxMmZZQ1k']\r\n```\r\n\r\nI've seen this has happened before in other datasets as reported in #537.\r\n\r\nI've tried clearing my cache and call again `load_dataset` but still is not working. My same codebase is successfully downloading and using other datasets (e.g. AGNews) without any problem, so I guess something has happened specifically to the DBPedia dataset in the last few days. \r\n\r\nCan you please check if there's a problem with the checksums? \r\n\r\nOr this is related to any other stuff? I've seen that the path in the cache for the dataset is `\/home\/fp\/.cache\/huggingface\/datasets\/d_bpedia14\/dbpedia_14\/2.0.0\/a70413e39e7a716afd0e90c9e53cb053691f56f9ef5fe317bd07f2c368e8e897...` and includes `d_bpedia14` instead maybe of `dbpedia_14`. Was this maybe a bug introduced recently?\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1907\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1907\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1906","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1906\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1906\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1906\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1906","id":811405274,"node_id":"MDU6SXNzdWU4MTE0MDUyNzQ=","number":1906,"title":"Feature Request: Support for Pandas `Categorical`","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the 
library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-18T19:46:05Z","updated_at":"2021-02-23T14:38:50Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\nfrom datasets import Dataset\r\nimport pandas as pd\r\nimport pyarrow\r\n\r\ndf = pd.DataFrame(pd.Series([\"a\", \"b\", \"c\", \"a\"], dtype=\"category\"))\r\npyarrow.Table.from_pandas(df)\r\nDataset.from_pandas(df)\r\n# Throws NotImplementedError\r\n# TODO(thom) this will need access to the dictionary as well (for labels). I.e. to the py_table\r\n```\r\n\r\nI'm curious if https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L796 could be built out in a way similar to `Sequence`?\r\n\r\ne.g. a `Map` class (or whatever name the maintainers might prefer) that can accept:\r\n\r\n```\r\nindex_type = generate_from_arrow_type(pa_type.index_type)\r\nvalue_type = generate_from_arrow_type(pa_type.value_type)\r\n```\r\n\r\nand then additional code points to modify:\r\n\r\n- FeatureType: https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L694\r\n- A branch to handle Map in get_nested_type: https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L719\r\n- I don't quite understand what `encode_nested_example` does but perhaps a branch there? https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L755\r\n- Similarly, I don't quite understand why `Sequence` is used this way in `generate_from_dict`, but perhaps a branch here? https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L775\r\n\r\nI couldn't find other usages of `Sequence` outside of defining specific datasets, so I'm not sure if that's a comprehensive set of touchpoints.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1906\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1906\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1905","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1905\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1905\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1905\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1905","id":811384174,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1OTIxMDk1","number":1905,"title":"Standardizing 
datasets.dtypes","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-18T19:15:31Z","updated_at":"2021-02-20T22:01:30Z","closed_at":"2021-02-20T22:01:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1905","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1905","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1905.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1905.patch","merged_at":null},"body":"This PR was further branched off of jdy-str-to-pyarrow-parsing, so it depends on https:\/\/github.com\/huggingface\/datasets\/pull\/1900 going first for the diff to be up-to-date (I'm not sure if there's a way for me to use jdy-str-to-pyarrow-parsing as a base branch while having it appear in the pull requests here).\r\n\r\nThis moves away from `str(pyarrow.DataType)` as the method of choice for creating dtypes, favoring an explicit mapping to a list of supported Value dtypes.\r\n\r\nI believe in practice this should be backward compatible, since anyone previously using Value() would only have been able to use dtypes that had an identically named pyarrow factory function, which are all explicitly supported here.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1905\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1905\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1904","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1904\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1904\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1904\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1904","id":811260904,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1ODE4MjA0","number":1904,"title":"Fix to_pandas for boolean 
ArrayXD","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-18T16:30:46Z","updated_at":"2021-02-18T17:10:03Z","closed_at":"2021-02-18T17:10:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1904","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1904","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1904.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1904.patch","merged_at":"2021-02-18T17:10:00Z"},"body":"As noticed in #1887 the conversion of a dataset with a boolean ArrayXD feature types fails because of the underlying ListArray conversion to numpy requires `zero_copy_only=False`.\r\n\r\nzero copy is available for all primitive types except booleans\r\nsee https:\/\/arrow.apache.org\/docs\/python\/generated\/pyarrow.Array.html#pyarrow.Array.to_numpy\r\nand https:\/\/issues.apache.org\/jira\/browse\/ARROW-2871?jql=text%20~%20%22boolean%20to_numpy%22\r\n\r\ncc @SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1904\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1904\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1903","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1903\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1903\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1903\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1903","id":811145531,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1NzIwOTk2","number":1903,"title":"Initial commit for the addition of TIMIT 
dataset","user":{"login":"vrindaprabhu","id":16264631,"node_id":"MDQ6VXNlcjE2MjY0NjMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16264631?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vrindaprabhu","html_url":"https:\/\/github.com\/vrindaprabhu","followers_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/followers","following_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/orgs","repos_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/repos","events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-18T14:23:12Z","updated_at":"2021-03-01T09:39:12Z","closed_at":"2021-03-01T09:39:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1903","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1903","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1903.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1903.patch","merged_at":"2021-03-01T09:39:12Z"},"body":"Below points needs to be addressed:\r\n\r\n- Creation of dummy dataset is failing\r\n- Need to check on the data representation\r\n- License is not creative commons. Copyright: Portions \u00a9 1993 Trustees of the University of Pennsylvania\r\n\r\nAlso the links (_except the download_) point to the ami corpus! 
;-)\r\n\r\n@patrickvonplaten Requesting your comments, will be happy to address them!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1903\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1903\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1902","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1902\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1902\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1902\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1902","id":810931171,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1NTQwMDM1","number":1902,"title":"Fix setimes_2 wmt urls","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-18T09:42:26Z","updated_at":"2021-02-18T09:55:41Z","closed_at":"2021-02-18T09:55:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1902","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1902","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1902.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1902.patch","merged_at":"2021-02-18T09:55:41Z"},"body":"Continuation of #1901 \r\nSome other urls were missing https","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1902\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1902\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1901","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1901\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1901\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1901\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1901","id":810845605,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1NDY5MDUy","number":1901,"title":"Fix OPUS dataset download errors","user":{"login":"YangWang92","id":3883941,"node_id":"MDQ6VXNlcjM4ODM5NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3883941?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/YangWang92","html_url":"https:\/\/github.com\/YangWang92","followers_url":"https:\/\/api.github.com\/users\/YangWang92\/followers","following_url":"https:\/\/api.github.com\/users\/YangWang92\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/YangWang92\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/YangWang92\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/YangWang92\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/YangWang92\/orgs","repos_url":"https:\/\/api.github.com\/users\/YangWang92\/repos","events_url":"https:\/\/api.github.com\/users\/YangWang92\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/YangWang92\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-18T07:39:41Z","updated_at":"2021-02-18T15:07:20Z","closed_at":"2021-02-18T09:39:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1901","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1901","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1901.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1901.patch","merged_at":"2021-02-18T09:39:21Z"},"body":"Replace http to https.\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/issues\/854\r\n\r\nhttps:\/\/discuss.huggingface.co\/t\/cannot-download-wmt16\/2081\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1901\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1901\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1900","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1900\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1900\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1900\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1900","id":810512488,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1MTkxNTc3","number":1900,"title":"Issue #1895: Bugfix for string_to_arrow timestamp[ns] 
support","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-17T20:26:04Z","updated_at":"2021-02-19T18:27:11Z","closed_at":"2021-02-19T18:27:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1900","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1900","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1900.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1900.patch","merged_at":"2021-02-19T18:27:11Z"},"body":"Should resolve https:\/\/github.com\/huggingface\/datasets\/issues\/1895\r\n\r\nThe main part of this PR adds additional parsing in `string_to_arrow` to convert the timestamp dtypes that result from `str(pa_type)` back into the pa.DataType TimestampType.\r\n\r\nWhile adding unit-testing, I noticed that support for the double\/float types also don't invert correctly, so I added them, which I believe would hypothetically make this section of `Value` redundant:\r\n\r\n```\r\n def __post_init__(self):\r\n if self.dtype == \"double\": # fix inferred type\r\n self.dtype = \"float64\"\r\n if self.dtype == \"float\": # fix inferred type\r\n self.dtype = \"float32\"\r\n```\r\n\r\nHowever, since I think Value.dtype is part of the public interface, removing that would result in a backward-incompatible change, so I didn't muck with that.\r\n\r\nThe rest of the PR consists of docstrings that I added while developing locally so I could keep track of which functions were supposed to be inverses of each other, and thought I'd include them initially in case you want to keep them around, but I'm happy to delete or remove any of them at your request!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1900\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1900\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1899","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1899\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1899\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1899\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1899","id":810308332,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1MDIxMjc4","number":1899,"title":"Fix: ALT - fix duplicated examples in alt-parallel","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-17T15:53:56Z","updated_at":"2021-02-17T17:20:49Z","closed_at":"2021-02-17T17:20:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1899","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1899","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1899.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1899.patch","merged_at":"2021-02-17T17:20:49Z"},"body":"As noticed in #1898 by @10-zin the examples of the `alt-paralel` configurations have all the same values for the `translation` field.\r\nThis was due to a bad copy of a python dict.\r\n\r\nThis PR fixes that.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1899\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1899\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1898","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1898\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1898\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1898\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1898","id":810157251,"node_id":"MDU6SXNzdWU4MTAxNTcyNTE=","number":1898,"title":"ALT dataset has repeating instances in all 
splits","user":{"login":"10-zin","id":33179372,"node_id":"MDQ6VXNlcjMzMTc5Mzcy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33179372?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/10-zin","html_url":"https:\/\/github.com\/10-zin","followers_url":"https:\/\/api.github.com\/users\/10-zin\/followers","following_url":"https:\/\/api.github.com\/users\/10-zin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/10-zin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/10-zin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/10-zin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/10-zin\/orgs","repos_url":"https:\/\/api.github.com\/users\/10-zin\/repos","events_url":"https:\/\/api.github.com\/users\/10-zin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/10-zin\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-02-17T12:51:42Z","updated_at":"2021-02-19T06:18:46Z","closed_at":"2021-02-19T06:18:46Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The [ALT](https:\/\/huggingface.co\/datasets\/alt) dataset has all the same instances within each split :\/\r\nSeemed 
like a great dataset for some experiments I wanted to carry out, especially since its medium-sized, and has all splits.\r\n\r\nWould be great if this could be fixed :)\r\n\r\nAdded a snapshot of the contents from `explore-datset` feature, for quick reference.\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/33179372\/108206321-442a2d00-714c-11eb-882f-b4b6e708ef9c.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1898\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1898\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1897","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1897\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1897\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1897\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1897","id":810113263,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0ODU3MTIy","number":1897,"title":"Fix PandasArrayExtensionArray conversion to native type","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-17T11:48:24Z","updated_at":"2021-02-17T13:15:16Z","closed_at":"2021-02-17T13:15:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1897","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1897","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1897.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1897.patch","merged_at":"2021-02-17T13:15:15Z"},"body":"To make the conversion to csv work in #1887 , we need PandasArrayExtensionArray used for multidimensional numpy arrays to be converted to pandas native types.\r\nHowever previously pandas.core.internals.ExtensionBlock.to_native_types would fail with an PandasExtensionArray because\r\n1. the PandasExtensionArray.isna method was wrong\r\n2. 
the conversion of a PandasExtensionArray to a numpy array with dtype=object was returning a multidimensional array while pandas excepts a 1D array in this case (more info [here](https:\/\/pandas.pydata.org\/pandas-docs\/stable\/reference\/api\/pandas.api.extensions.ExtensionArray.html#pandas.api.extensions.ExtensionArray))\r\n\r\nI fixed these two issues and now the conversion to native types works, and so is the export to csv.\r\ncc @SBrandeis ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1897\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1897\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1895","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1895\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1895\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1895\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1895","id":809630271,"node_id":"MDU6SXNzdWU4MDk2MzAyNzE=","number":1895,"title":"Bug Report: timestamp[ns] not recognized","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-02-16T20:38:04Z","updated_at":"2021-02-19T18:27:11Z","closed_at":"2021-02-19T18:27:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Repro:\r\n\r\n```\r\nfrom datasets import Dataset\r\nimport pandas as pd\r\nimport pyarrow\r\n\r\ndf = pd.DataFrame(pd.date_range(\"2018-01-01\", periods=3, freq=\"H\"))\r\npyarrow.Table.from_pandas(df)\r\nDataset.from_pandas(df)\r\n# Throws ValueError: Neither timestamp[ns] nor timestamp[ns]_ seems to be a pyarrow data type.\r\n```\r\n\r\nThe factory function seems to be just \"timestamp\": https:\/\/arrow.apache.org\/docs\/python\/generated\/pyarrow.timestamp.html#pyarrow.timestamp\r\n\r\nIt seems like https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L36-L43 could have a little bit of additional structure for handling these cases? 
I'd be happy to take a shot at opening a PR if I could receive some guidance on whether parsing something like `timestamp[ns]` and resolving it to timestamp('ns') is the goal of this method.\r\n\r\nAlternatively, if I'm using this incorrectly (e.g. is the expectation that we always provide a schema when timestamps are involved?), that would be very helpful to know as well!\r\n\r\n```\r\n$ pip list # only the relevant libraries\/versions\r\ndatasets 1.2.1\r\npandas 1.0.3\r\npyarrow 3.0.0\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1895\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1895\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1894","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1894\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1894\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1894\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1894","id":809609654,"node_id":"MDU6SXNzdWU4MDk2MDk2NTQ=","number":1894,"title":"benchmarking against MMapIndexedDataset","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-16T20:04:58Z","updated_at":"2021-02-17T18:52:28Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to benchmark my datasets based implementation against fairseq's [`MMapIndexedDataset`](https:\/\/github.com\/pytorch\/fairseq\/blob\/master\/fairseq\/data\/indexed_dataset.py#L365) and finding that, according to psrecord, my `datasets` implem uses about 3% more CPU memory and runs 1% slower for `wikitext103` (~1GB of tokens).\r\n\r\nQuestions:\r\n1) Is this (basically identical) performance expected? \r\n2) Is there a scenario where this library will outperform `MMapIndexedDataset`? (maybe more examples\/larger examples?)\r\n3) Should I be using different benchmarking tools than `psrecord`\/how do you guys do benchmarks?\r\n\r\nThanks in advance! 
Sam","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1894\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1894\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1893","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1893\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1893\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1893\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1893","id":809556503,"node_id":"MDU6SXNzdWU4MDk1NTY1MDM=","number":1893,"title":"wmt19 is broken","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-02-16T18:39:58Z","updated_at":"2021-03-03T17:42:02Z","closed_at":"2021-03-03T17:42:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"1. Check which lang pairs we have: `--dataset_name wmt19`:\r\n\r\nPlease pick one among the available configs: ['cs-en', 'de-en', 'fi-en', 'gu-en', 'kk-en', 'lt-en', 'ru-en', 'zh-en', 'fr-de']\r\n\r\n \r\n2. 
OK, let's pick `ru-en`:\r\n\r\n`--dataset_name wmt19 --dataset_config \"ru-en\"`\r\n\r\nno cookies:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \".\/run_seq2seq.py\", line 661, in \r\n main()\r\n File \".\/run_seq2seq.py\", line 317, in main\r\n datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 740, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/builder.py\", line 572, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/builder.py\", line 628, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/stas\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wmt19\/436092de5f3faaf0fc28bc84875475b384e90a5470fa6afaee11039ceddc5052\/wmt_utils.py\", line 755, in _split_generators\r\n downloaded_files = dl_manager.download_and_extract(urls_to_download)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/download_manager.py\", line 276, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/download_manager.py\", line 191, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/py_utils.py\", line 233, in map_nested\r\n mapped = [\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/py_utils.py\", line 234, in \r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/py_utils.py\", line 190, in _single_map_nested\r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/py_utils.py\", line 190, in \r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/py_utils.py\", line 172, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/download_manager.py\", line 211, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 274, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 584, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/storage.googleapis.com\/tfdataset-data\/downloadataset\/uncorpus\/UNv1.0.en-ru.tar.gz\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1893\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1893\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1892","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1892\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1892\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1892\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1892","id":809554174,"node_id":"MDU6SXNzdWU4MDk1NTQxNzQ=","number":1892,"title":"request to mirror wmt datasets, as they are really slow to download","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events",
"type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-02-16T18:36:11Z","updated_at":"2021-10-26T06:55:42Z","closed_at":"2021-03-25T11:53:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Would it be possible to mirror the wmt data files under hf? Some of them take hours to download and not because of the local speed. They are all quite small datasets, just extremely slow to download.\r\n\r\nThank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1892\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1892\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1891","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1891\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1891\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1891\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1891","id":809550001,"node_id":"MDU6SXNzdWU4MDk1NTAwMDE=","number":1891,"title":"suggestion to improve a missing dataset error","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-16T18:29:13Z","updated_at":"2021-02-16T18:30:14Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I was using `--dataset_name wmt19` all was good. 
Then thought perhaps wmt20 is out, so I tried to use `--dataset_name wmt20`, got 3 different errors (1 repeated twice), none telling me the real issue - that `wmt20` isn't in the `datasets`:\r\n\r\n```\r\nTrue, predict_with_generate=True)\r\nTraceback (most recent call last):\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 323, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 274, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 584, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/wmt20\/wmt20.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 335, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 274, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 584, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/wmt20\/wmt20.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \".\/run_seq2seq.py\", line 661, in \r\n main()\r\n File \".\/run_seq2seq.py\", line 317, in main\r\n datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 706, in load_dataset\r\n module_path, hash, resolved_file_path = prepare_module(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 343, in prepare_module\r\n raise FileNotFoundError(\r\nFileNotFoundError: Couldn't find file locally at wmt20\/wmt20.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/wmt20\/wmt20.py.\r\nThe file is also not present on the master branch on github.\r\n```\r\n\r\nSuggestion: if it is not in a local path, check that there is an actual `https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/wmt20` first and assert \"dataset `wmt20` doesn't exist in datasets\", rather than trying to find a load script - since the whole repo is not there.\r\n\r\nThe error occured when running:\r\n```\r\ncd examples\/seq2seq\r\nexport BS=16; rm -r output_dir; PYTHONPATH=..\/..\/src USE_TF=0 CUDA_VISIBLE_DEVICES=0 python .\/run_seq2seq.py --model_name_or_path t5-small --output_dir output_dir --adam_eps 1e-06 --do_eval --evaluation_strategy=steps --label_smoothing 0.1 --learning_rate 3e-5 --logging_first_step --logging_steps 1000 --max_source_length 128 --max_target_length 128 --num_train_epochs 1 --overwrite_output_dir --per_device_eval_batch_size $BS --predict_with_generate --eval_steps 25000 --sortish_sampler --task translation_en_to_ro --val_max_target_length 128 
--warmup_steps 500 --max_val_samples 500 --dataset_name wmt20 --dataset_config \"ro-en\" --source_prefix \"translate English to Romanian: \"\r\n```\r\n\r\nThanks.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1891\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1891\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1890","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1890\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1890\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1890\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1890","id":809395586,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0MjY0OTMx","number":1890,"title":"Reformat dataset cards section titles","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-16T15:11:47Z","updated_at":"2021-02-16T15:12:34Z","closed_at":"2021-02-16T15:12:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1890","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1890","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1890.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1890.patch","merged_at":"2021-02-16T15:12:33Z"},"body":"Titles are formatted like [Foo](#foo) instead of just Foo","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1890\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1890\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1889","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1889\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1889\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1889\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1889","id":809276015,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0MTY1NDAz","number":1889,"title":"Implement to_dict and to_pandas for Dataset","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-16T12:38:19Z","updated_at":"2021-02-18T18:42:37Z","closed_at":"2021-02-18T18:42:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1889","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1889","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1889.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1889.patch","merged_at":"2021-02-18T18:42:34Z"},"body":"With options to return a generator or the full dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1889\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1889\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1888","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1888\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1888\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1888\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1888","id":809241123,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0MTM2MDU4","number":1888,"title":"Docs for adding new column on formatted 
dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-16T11:45:00Z","updated_at":"2021-03-30T14:01:03Z","closed_at":"2021-02-16T11:58:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1888","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1888","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1888.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1888.patch","merged_at":"2021-02-16T11:58:57Z"},"body":"As mentioned in #1872 we should add in the documentation how the format gets updated when new columns are added\r\n\r\nClose #1872","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1888\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1888\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1887","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1887\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1887\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1887\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1887","id":809229809,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0MTI2NTMy","number":1887,"title":"Implement to_csv for 
Dataset","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-02-16T11:27:29Z","updated_at":"2021-02-19T09:41:59Z","closed_at":"2021-02-19T09:41:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1887","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1887","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1887.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1887.patch","merged_at":"2021-02-19T09:41:59Z"},"body":"cc @thomwolf \r\n\r\n`to_csv` supports passing either a file path or a *binary* file object\r\nThe writing is batched to avoid loading the whole table in memory","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1887\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1887\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1886","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1886\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1886\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1886\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1886","id":809221885,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0MTE5ODcz","number":1886,"title":"Common 
voice","user":{"login":"BirgerMoell","id":1704131,"node_id":"MDQ6VXNlcjE3MDQxMzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1704131?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BirgerMoell","html_url":"https:\/\/github.com\/BirgerMoell","followers_url":"https:\/\/api.github.com\/users\/BirgerMoell\/followers","following_url":"https:\/\/api.github.com\/users\/BirgerMoell\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BirgerMoell\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BirgerMoell\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BirgerMoell\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BirgerMoell\/orgs","repos_url":"https:\/\/api.github.com\/users\/BirgerMoell\/repos","events_url":"https:\/\/api.github.com\/users\/BirgerMoell\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BirgerMoell\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-16T11:16:10Z","updated_at":"2021-03-09T18:51:31Z","closed_at":"2021-03-09T18:51:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1886","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1886","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1886.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1886.patch","merged_at":"2021-03-09T18:51:31Z"},"body":"Started filling out information about the dataset and a dataset card.\r\n\r\nTo do\r\nCreate tagging file\r\nUpdate the common_voice.py file with more information","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1886\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1886\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1885","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1885\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1885\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1885\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1885","id":808881501,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczODQyNzcz","number":1885,"title":"add missing info on how to add large 
files","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-15T23:46:39Z","updated_at":"2021-02-16T16:22:19Z","closed_at":"2021-02-16T11:44:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1885","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1885","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1885.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1885.patch","merged_at":"2021-02-16T11:44:12Z"},"body":"Thanks to @lhoestq's instructions I was able to add data files to a custom dataset repo. This PR is attempting to tell others how to do the same if they need to.\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1885\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1885\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1884","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1884\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1884\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1884\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1884","id":808755894,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNzQwNzI5","number":1884,"title":"dtype fix when using numpy 
arrays","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-15T18:55:25Z","updated_at":"2021-07-30T11:01:18Z","closed_at":"2021-07-30T11:01:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1884","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1884","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1884.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1884.patch","merged_at":null},"body":"As discussed in #625 this fix lets the user preserve the dtype of numpy array to pyarrow array which was getting lost due to conversion of numpy array -> list -> pyarrow array","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1884\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1884\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1883","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1883\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1883\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1883\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1883","id":808750623,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNzM2NTIz","number":1883,"title":"Add not-in-place implementations for several dataset 
transforms","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-15T18:44:26Z","updated_at":"2021-02-24T14:54:49Z","closed_at":"2021-02-24T14:53:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1883","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1883","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1883.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1883.patch","merged_at":"2021-02-24T14:53:26Z"},"body":"Should we deprecate in-place versions of such methods?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1883\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1883\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1882","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1882\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1882\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1882\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1882","id":808716576,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNzA4OTEw","number":1882,"title":"Create Remote 
Manager","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-15T17:36:24Z","updated_at":"2021-03-08T16:15:10Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1882","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1882","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1882.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1882.patch","merged_at":null},"body":"Refactoring to separate the concern of remote (HTTP\/FTP requests) management.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1882\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1882\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1881","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1881\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1881\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1881\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1881","id":808578200,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNTk1Nzkw","number":1881,"title":"`list_datasets()` returns a list of strings, not 
objects","user":{"login":"pminervini","id":227357,"node_id":"MDQ6VXNlcjIyNzM1Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/227357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pminervini","html_url":"https:\/\/github.com\/pminervini","followers_url":"https:\/\/api.github.com\/users\/pminervini\/followers","following_url":"https:\/\/api.github.com\/users\/pminervini\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pminervini\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pminervini\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pminervini\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pminervini\/orgs","repos_url":"https:\/\/api.github.com\/users\/pminervini\/repos","events_url":"https:\/\/api.github.com\/users\/pminervini\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pminervini\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-15T14:20:15Z","updated_at":"2021-02-15T15:09:49Z","closed_at":"2021-02-15T15:09:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1881","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1881","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1881.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1881.patch","merged_at":"2021-02-15T15:09:48Z"},"body":"Here and there in the docs there is still stuff like this:\r\n\r\n```python\r\n>>> datasets_list = list_datasets()\r\n>>> print(', '.join(dataset.id for dataset in datasets_list))\r\n```\r\n\r\nHowever, my understanding is that `list_datasets()` returns a list of strings rather than a list of objects.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1881\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1881\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1880","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1880\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1880\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1880\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1880","id":808563439,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNTgzNjg0","number":1880,"title":"Update multi_woz_v22 
checksums","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-15T14:00:18Z","updated_at":"2021-02-15T14:18:19Z","closed_at":"2021-02-15T14:18:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1880","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1880","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1880.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1880.patch","merged_at":"2021-02-15T14:18:18Z"},"body":"As noticed in #1876 the checksums of this dataset are outdated.\r\nI updated them in this PR","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1880\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1880\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1879","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1879\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1879\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1879\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1879","id":808541442,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNTY1NDAx","number":1879,"title":"Replace 
flatten_nested","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-15T13:29:40Z","updated_at":"2021-02-19T18:35:14Z","closed_at":"2021-02-19T18:35:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1879","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1879","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1879.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1879.patch","merged_at":"2021-02-19T18:35:14Z"},"body":"Replace `flatten_nested` with `NestedDataStructure.flatten`.\r\n\r\nThis is a first step towards having all NestedDataStructure logic as a separated concern, independent of the caller\/user of the data structure.\r\n\r\nEventually, all checks (whether the underlying data is list, dict, etc.) 
will be only inside this class.\r\n\r\nI have also generalized the flattening, and now it handles multiple levels of nesting.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1879\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1879\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1878","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1878\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1878\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1878\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1878","id":808526883,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNTUyODk3","number":1878,"title":"Add LJ Speech dataset","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-15T13:10:42Z","updated_at":"2021-02-15T19:39:41Z","closed_at":"2021-02-15T14:18:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1878","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1878","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1878.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1878.patch","merged_at":"2021-02-15T14:18:09Z"},"body":"This PR adds the LJ Speech dataset (https:\/\/keithito.com\/LJ-Speech-Dataset\/)\r\nAs requested by #1841 \r\nThe ASR format is based on #1767 \r\n\r\nThere are a couple of quirks that should be addressed:\r\n- I tagged this dataset as `other-other-automatic-speech-recognition` and `other-other-text-to-speech` (as classified by paperswithcode). Since the number of speech datasets is about to grow, maybe these categories should be added to the main list? \r\n- Similarly to #1767 this dataset uses only a single dummy sample to reduce the zip size (`wav`s are quite heavy). Is there a plan to allow LFS or S3 usage for dummy data in the repo?\r\n- The dataset is distributed under the Public Domain license, which is not used anywhere else in the repo, AFAIK. 
Do you think Public Domain is worth adding to the tagger app as well?\r\n\r\nPinging @patrickvonplaten to review","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1878\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1878\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1877","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1877\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1877\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1877\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1877","id":808462272,"node_id":"MDU6SXNzdWU4MDg0NjIyNzI=","number":1877,"title":"Allow concatenation of both in-memory and on-disk datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starre
d_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-02-15T11:39:46Z","updated_at":"2021-03-26T16:51:58Z","closed_at":"2021-03-26T16:51:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"This is a prerequisite for the addition of the `add_item` feature (see #1870).\r\nCurrently there is one assumption that we would need to change: a dataset is either fully in memory (dataset._data_files is empty), or the dataset can be reloaded from disk (using the dataset._data_files).\r\nThis assumption is used for pickling for example:\r\n- in-memory dataset can just be pickled\/unpickled in-memory\r\n- on-disk dataset can be unloaded to only keep the filepaths when pickling, and then reloaded from the disk when unpickling\r\n\r\nMaybe let's have a design that allows a Dataset to have a Table that can be rebuilt from heterogenous sources like in-memory tables or on-disk tables ? This could also be further extended in the future\r\n\r\nOne idea would be to define a list of sources and each source implements a way to reload its corresponding pyarrow Table.\r\nThen the dataset would be the concatenation of all these tables.\r\n\r\nDepending on the source type, the serialization using pickle would be different. In-memory data would be copied while on-disk data would simply be replaced by the path to these data.\r\n\r\nIf you have some ideas you would like to share about the design\/API feel free to do so :)\r\n\r\ncc @albertvillanova ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1877\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1877\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1876","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1876\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1876\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1876\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1876","id":808025859,"node_id":"MDU6SXNzdWU4MDgwMjU4NTk=","number":1876,"title":" load_dataset(\"multi_woz_v22\") 
NonMatchingChecksumError","user":{"login":"Vincent950129","id":5945326,"node_id":"MDQ6VXNlcjU5NDUzMjY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5945326?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Vincent950129","html_url":"https:\/\/github.com\/Vincent950129","followers_url":"https:\/\/api.github.com\/users\/Vincent950129\/followers","following_url":"https:\/\/api.github.com\/users\/Vincent950129\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Vincent950129\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Vincent950129\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Vincent950129\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Vincent950129\/orgs","repos_url":"https:\/\/api.github.com\/users\/Vincent950129\/repos","events_url":"https:\/\/api.github.com\/users\/Vincent950129\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Vincent950129\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-14T19:14:48Z","updated_at":"2021-08-04T18:08:00Z","closed_at":"2021-08-04T18:08:00Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, it seems that loading the multi_woz_v22 dataset gives a NonMatchingChecksumError.\r\n\r\nTo reproduce:\r\n\r\n`dataset = load_dataset('multi_woz_v22','v2.2_active_only',split='train')`\r\n\r\n\r\nThis will give the following error:\r\n\r\n```\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/dialog_acts.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_001.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_003.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_004.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_005.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_006.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_007.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_008.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_009.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_010.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_012.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_013.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_014.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_015.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_016.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_017.json', 
'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/dev\/dialogues_001.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/dev\/dialogues_002.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/test\/dialogues_001.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/test\/dialogues_002.json']\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1876\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1876\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1875","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1875\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1875\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1875\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1875","id":807887267,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczMDM2NzE0","number":1875,"title":"Adding sari metric","user":{"login":"ddhruvkr","id":6061911,"node_id":"MDQ6VXNlcjYwNjE5MTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6061911?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ddhruvkr","html_url":"https:\/\/github.com\/ddhruvkr","followers_url":"https:\/\/api.github.com\/users\/ddhruvkr\/followers","following_url":"https:\/\/api.github.com\/users\/ddhruvkr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ddhruvkr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ddhruvkr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ddhruvkr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ddhruvkr\/orgs","repos_url":"https:\/\/api.github.com\/users\/ddhruvkr\/repos","events_url":"https:\/\/api.github.com\/users\/ddhruvkr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ddhruvkr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-14T04:38:35Z","updated_at":"2021-02-17T15:56:27Z","closed_at":"2021-02-17T15:56:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1875","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1875","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1875.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1875.patch","merged_at":"2021-02-17T15:56:26Z"},"body":"Adding SARI metric that is used in evaluation of text simplification. 
This is required as part of the GEM benchmark.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1875\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1875\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1874","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1874\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1874\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1874\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1874","id":807786094,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyOTYzMjAy","number":1874,"title":"Adding Europarl Bilingual dataset","user":{"login":"lucadiliello","id":23355969,"node_id":"MDQ6VXNlcjIzMzU1OTY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23355969?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucadiliello","html_url":"https:\/\/github.com\/lucadiliello","followers_url":"https:\/\/api.github.com\/users\/lucadiliello\/followers","following_url":"https:\/\/api.github.com\/users\/lucadiliello\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucadiliello\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucadiliello\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucadiliello\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucadiliello\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucadiliello\/repos","events_url":"https:\/\/api.github.com\/users\/lucadiliello\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucadiliello\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-02-13T17:02:04Z","updated_at":"2021-03-04T10:38:22Z","closed_at":"2021-03-04T10:38:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1874","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1874","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1874.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1874.patch","merged_at":"2021-03-04T10:38:22Z"},"body":"Implementation of Europarl bilingual dataset from described [here](https:\/\/opus.nlpl.eu\/Europarl.php).\r\n\r\nThis dataset allows to use every language pair detailed in the original dataset. 
The loading script manages also the small errors contained in the original dataset (in very rare cases (1 over 10M) there are some keys that references to inexistent sentences).\r\nI chose to follow the the style of a similar dataset available in this repository: `multi_para_crawl`.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1874\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1874\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1873","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1873\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1873\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1873\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1873","id":807750745,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyOTM4MTYy","number":1873,"title":"add iapp_wiki_qa_squad","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-13T13:34:27Z","updated_at":"2021-02-16T14:21:58Z","closed_at":"2021-02-16T14:21:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1873","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1873","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1873.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1873.patch","merged_at":"2021-02-16T14:21:58Z"},"body":"`iapp_wiki_qa_squad` is an extractive question answering dataset from Thai Wikipedia articles.\r\nIt is adapted from [the original iapp-wiki-qa-dataset](https:\/\/github.com\/iapp-technology\/iapp-wiki-qa-dataset)\r\nto [SQuAD](https:\/\/rajpurkar.github.io\/SQuAD-explorer\/) format, resulting in\r\n5761\/742\/739 questions from 1529\/191\/192 articles.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1873\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1873\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1872","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1872\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1872\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1872\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1872","id":807711935,"node_id":"MDU6SXNzdWU4MDc3MTE5MzU=","number":1872,"title":"Adding a new column to the dataset after set_format was called","user":{"login":"villmow","id":2743060,"node_id":"MDQ6VXNlcjI3NDMwNjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2743060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/villmow","html_url":"https:\/\/github.com\/villmow","followers_url":"https:\/\/api.github.com\/users\/villmow\/followers","following_url":"https:\/\/api.github.com\/users\/villmow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/villmow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/villmow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/villmow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/villmow\/orgs","repos_url":"https:\/\/api.github.com\/users\/villmow\/repos","events_url":"https:\/\/api.github.com\/users\/villmow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/villmow\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-13T09:14:35Z","updated_at":"2021-03-30T14:01:45Z","closed_at":"2021-03-30T14:01:45Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, \r\n\r\nthanks for the nice library. I'm in the process of creating a custom dataset, which has a mix of tensors and lists of strings. I stumbled upon an error and want to know if its a problem on my side. \r\n\r\nI load some lists of strings and integers, then call `data.set_format(\"torch\", columns=[\"some_integer_column1\", \"some_integer_column2\"], output_all_columns=True)`. This converts the integer columns into tensors, but keeps the lists of strings as they are. I then call `map` to add a new column to my dataset, which is a **list of strings**. Once I iterate through my dataset, I get an error that the new column can't be converted into a tensor (which is probably caused by `set_format`). 
\r\n\r\nBelow some pseudo code:\r\n```python\r\n def augment_func(sample: Dict) -> Dict:\r\n # do something\r\n return {\r\n \"some_integer_column1\" : augmented_data[\"some_integer_column1\"], # <-- tensor\r\n \"some_integer_column2\" : augmented_data[\"some_integer_column2\"], # <-- tensor\r\n \"NEW_COLUMN\": targets, # <-- list of strings\r\n }\r\n\r\n\r\n data = datasets.load_dataset(__file__, data_dir=\"...\", split=\"train\")\r\n data.set_format(\"torch\", columns=[\"some_integer_column1\", \"some_integer_column2\"], output_all_columns=True)\r\n\r\n augmented_dataset = data.map(augment_func, batched=False)\r\n \r\n for sample in augmented_dataset:\r\n print(sample) # fails\r\n\r\n```\r\n\r\nand the exception:\r\n```python\r\nTraceback (most recent call last):\r\n File \"dataset.py\", line 487, in \r\n main()\r\n File \"dataset.py\", line 471, in main\r\n for sample in augmented_dataset:\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 697, in __iter__\r\n yield self._getitem(\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 1069, in _getitem\r\n outputs = self._convert_outputs(\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 890, in _convert_outputs\r\n v = map_nested(command, v, **map_nested_kwargs)\r\n File \"lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n return function(data_struct)\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 850, in command\r\n return [map_nested(command, i, **map_nested_kwargs) for i in x]\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 850, in \r\n return [map_nested(command, i, **map_nested_kwargs) for i in x]\r\n File \"lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n return function(data_struct)\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 850, in command\r\n return [map_nested(command, i, **map_nested_kwargs) for i in x]\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 850, in \r\n return [map_nested(command, i, **map_nested_kwargs) for i in x]\r\n File \"lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n return function(data_struct)\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 851, in command\r\n return torch.tensor(x, **format_kwargs)\r\nTypeError: new(): invalid data type 'str'\r\n```\r\n\r\nThanks!\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1872\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1872\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1871","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1871\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1871\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1871\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1871","id":807697671,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyODk5Nzgz","number":1871,"title":"Add newspop 
dataset","user":{"login":"frankier","id":299380,"node_id":"MDQ6VXNlcjI5OTM4MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/299380?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/frankier","html_url":"https:\/\/github.com\/frankier","followers_url":"https:\/\/api.github.com\/users\/frankier\/followers","following_url":"https:\/\/api.github.com\/users\/frankier\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/frankier\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/frankier\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/frankier\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/frankier\/orgs","repos_url":"https:\/\/api.github.com\/users\/frankier\/repos","events_url":"https:\/\/api.github.com\/users\/frankier\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/frankier\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-13T07:31:23Z","updated_at":"2021-03-08T10:12:45Z","closed_at":"2021-03-08T10:12:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1871","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1871","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1871.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1871.patch","merged_at":"2021-03-08T10:12:45Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1871\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1871\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1870","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1870\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1870\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1870\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1870","id":807306564,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyNTc4Mjc4","number":1870,"title":"Implement Dataset 
add_item","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/3","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3\/labels","id":6644287,"node_id":"MDk6TWlsZXN0b25lNjY0NDI4Nw==","number":3,"title":"1.7","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":3,"state":"closed","created_at":"2021-04-09T13:16:31Z","updated_at":"2021-05-31T16:20:53Z","due_on":"2021-05-14T07:00:00Z","closed_at":"2021-05-31T16:20:53Z"},"comments":5,"created_at":"2021-02-12T15:03:46Z","updated_at":"2021-04-23T10:01:31Z","closed_at":"2021-04-23T10:01:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1870","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1870","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1870.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1870.patch","merged_at":"2021-04-23T10:01:30Z"},"body":"Implement 
`Dataset.add_item`.\r\n\r\nClose #1854.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1870\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1870\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1869","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1869\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1869\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1869\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1869","id":807159835,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyNDU0NTMy","number":1869,"title":"Remove outdated commands in favor of huggingface-cli","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-12T11:28:10Z","updated_at":"2021-02-12T16:13:09Z","closed_at":"2021-02-12T16:13:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1869","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1869","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1869.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1869.patch","merged_at":"2021-02-12T16:13:08Z"},"body":"Removing the old user commands since `huggingface_hub` is going to be used instead.\r\ncc @julien-c ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1869\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1869\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1868","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1868\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1868\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1868\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1868","id":807138159,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyNDM2MjA0","number":1868,"title":"Update oscar sizes","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-12T10:55:35Z","updated_at":"2021-02-12T11:03:07Z","closed_at":"2021-02-12T11:03:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1868","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1868","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1868.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1868.patch","merged_at":"2021-02-12T11:03:06Z"},"body":"This commit https:\/\/github.com\/huggingface\/datasets\/commit\/837a152e4724adc5308e2c4481908c00a8d93383 removed empty lines from the oscar deduplicated datasets. This PR updates the size of each deduplicated dataset to fix possible `NonMatchingSplitsSizesError` errors. 
cc @cahya-wirawan","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1868\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1868\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1867","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1867\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1867\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1867\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1867","id":807127181,"node_id":"MDU6SXNzdWU4MDcxMjcxODE=","number":1867,"title":"ERROR WHEN USING SET_TRANSFORM() ","user":{"login":"alexvaca0","id":35173563,"node_id":"MDQ6VXNlcjM1MTczNTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35173563?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexvaca0","html_url":"https:\/\/github.com\/alexvaca0","followers_url":"https:\/\/api.github.com\/users\/alexvaca0\/followers","following_url":"https:\/\/api.github.com\/users\/alexvaca0\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexvaca0\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexvaca0\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexvaca0\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexvaca0\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexvaca0\/repos","events_url":"https:\/\/api.github.com\/users\/alexvaca0\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexvaca0\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-02-12T10:38:31Z","updated_at":"2021-03-01T14:04:24Z","closed_at":"2021-02-24T12:00:43Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I'm trying to use dataset.set_transform(encode) as @lhoestq told me in this issue: https:\/\/github.com\/huggingface\/datasets\/issues\/1825#issuecomment-774202797\r\n\r\nHowever, when I try to use Trainer from transformers with such dataset, it throws an error:\r\n\r\n```\r\nTypeError: __init__() missing 1 required positional argument: 'transform'\r\n[INFO|trainer.py:357] 2021-02-12 10:18:09,893 >> The following columns in the training set don't have a corresponding argument in `AlbertForMaskedLM.forward` and have been ignored: text.\r\nException in device=TPU:0: __init__() missing 1 required positional argument: 'transform'\r\nTraceback (most recent call last):\r\n File \"\/anaconda3\/envs\/torch-xla-1.7\/lib\/python3.6\/site-packages\/torch_xla\/distributed\/xla_multiprocessing.py\", line 330, in _mp_start_fn\r\n _start_fn(index, pf_cfg, fn, args)\r\n File \"\/anaconda3\/envs\/torch-xla-1.7\/lib\/python3.6\/site-packages\/torch_xla\/distributed\/xla_multiprocessing.py\", line 324, in _start_fn\r\n fn(gindex, *args)\r\n File \"\/home\/alejandro_vaca\/transformers\/examples\/language-modeling\/run_mlm_wwm.py\", line 368, in _mp_fn\r\n main()\r\n File \"\/home\/alejandro_vaca\/transformers\/examples\/language-modeling\/run_mlm_wwm.py\", line 332, in main\r\n 
data_collator=data_collator,\r\n File \"\/anaconda3\/envs\/torch-xla-1.7\/lib\/python3.6\/site-packages\/transformers\/trainer.py\", line 286, in __init__\r\n self._remove_unused_columns(self.train_dataset, description=\"training\")\r\n File \"\/anaconda3\/envs\/torch-xla-1.7\/lib\/python3.6\/site-packages\/transformers\/trainer.py\", line 359, in _remove_unused_columns\r\n dataset.set_format(type=dataset.format[\"type\"], columns=columns)\r\n File \"\/home\/alejandro_vaca\/datasets\/src\/datasets\/fingerprint.py\", line 312, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/home\/alejandro_vaca\/datasets\/src\/datasets\/arrow_dataset.py\", line 818, in set_format\r\n _ = get_formatter(type, **format_kwargs)\r\n File \"\/home\/alejandro_vaca\/datasets\/src\/datasets\/formatting\/__init__.py\", line 112, in get_formatter\r\n return _FORMAT_TYPES[format_type](**format_kwargs)\r\nTypeError: __init__() missing 1 required positional argument: 'transform'\r\n```\r\n\r\nThe code I'm using:\r\n\r\n```{python}\r\n\r\n def tokenize_function(examples):\r\n # Remove empty lines\r\n examples[\"text\"] = [line for line in examples[\"text\"] if len(line) > 0 and not line.isspace()]\r\n return tokenizer(examples[\"text\"], padding=padding, truncation=True, max_length=data_args.max_seq_length)\r\n\r\n datasets.set_transform(tokenize_function)\r\n\r\n data_collator = DataCollatorForWholeWordMask(tokenizer=tokenizer, mlm_probability=data_args.mlm_probability)\r\n\r\n # Initialize our Trainer\r\n trainer = Trainer(\r\n model=model,\r\n args=training_args,\r\n train_dataset=datasets[\"train\"] if training_args.do_train else None,\r\n eval_dataset=datasets[\"val\"] if training_args.do_eval else None,\r\n tokenizer=tokenizer,\r\n data_collator=data_collator,\r\n )\r\n```\r\n\r\nI've installed from source, master branch.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1867\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1867\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1866","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1866\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1866\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1866\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1866","id":807017816,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyMzM3NDQ1","number":1866,"title":"Add dataset for Financial 
PhraseBank","user":{"login":"frankier","id":299380,"node_id":"MDQ6VXNlcjI5OTM4MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/299380?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/frankier","html_url":"https:\/\/github.com\/frankier","followers_url":"https:\/\/api.github.com\/users\/frankier\/followers","following_url":"https:\/\/api.github.com\/users\/frankier\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/frankier\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/frankier\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/frankier\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/frankier\/orgs","repos_url":"https:\/\/api.github.com\/users\/frankier\/repos","events_url":"https:\/\/api.github.com\/users\/frankier\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/frankier\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-12T07:30:56Z","updated_at":"2021-02-17T14:22:36Z","closed_at":"2021-02-17T14:22:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1866","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1866","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1866.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1866.patch","merged_at":"2021-02-17T14:22:36Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1866\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1866\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1865","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1865\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1865\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1865\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1865","id":806388290,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcxODE2ODI2","number":1865,"title":"Updated OPUS Open Subtitles Dataset with metadata 
information","user":{"login":"Valahaar","id":19476123,"node_id":"MDQ6VXNlcjE5NDc2MTIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19476123?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Valahaar","html_url":"https:\/\/github.com\/Valahaar","followers_url":"https:\/\/api.github.com\/users\/Valahaar\/followers","following_url":"https:\/\/api.github.com\/users\/Valahaar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Valahaar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Valahaar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Valahaar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Valahaar\/orgs","repos_url":"https:\/\/api.github.com\/users\/Valahaar\/repos","events_url":"https:\/\/api.github.com\/users\/Valahaar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Valahaar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-11T13:26:26Z","updated_at":"2021-02-19T12:38:09Z","closed_at":"2021-02-12T16:59:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1865","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1865","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1865.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1865.patch","merged_at":"2021-02-12T16:59:44Z"},"body":"Close #1844 \r\n\r\nProblems:\r\n- I ran `python datasets-cli test datasets\/open_subtitles --save_infos --all_configs`, hence the change in `dataset_infos.json`, but it appears that the metadata features have not been added for all pairs. Any idea why that might be?\r\n- Possibly related to the above, I tried doing `pip uninstall datasets && pip install -e \".[dev]\"` after the changes, and loading the dataset via `load_dataset(\"open_subtitles\", lang1='hi', lang2='it')` to check if the update worked, but the loaded dataset did not contain the metadata fields (neither in the features nor doing `next(iter(dataset['train']))`). What step(s) did I miss?\r\n\r\nQuestions:\r\n- Is it ok to have a `classmethod` in there? I have not seen any in the few other datasets I have checked. 
I could make it a local method of the `_generate_examples` method, but I'd rather not duplicate the logic...","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1865\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1865\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1864","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1864\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1864\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1864\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1864","id":806172843,"node_id":"MDU6SXNzdWU4MDYxNzI4NDM=","number":1864,"title":"Add Winogender Schemas","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-11T08:18:38Z","updated_at":"2021-02-11T08:19:51Z","closed_at":"2021-02-11T08:19:51Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Winogender Schemas\r\n- **Description:** Winogender Schemas (inspired by Winograd Schemas) are minimal pairs of sentences that differ only by the gender of one pronoun in the sentence, designed to test for the presence of gender bias in automated coreference resolution systems.\r\n- **Paper:** https:\/\/arxiv.org\/abs\/1804.09301\r\n- **Data:** https:\/\/github.com\/rudinger\/winogender-schemas (see data directory)\r\n- **Motivation:** Testing gender bias in automated coreference resolution systems, improve coreference resolution in general.\r\n\r\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1864\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1864\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1863","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1863\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1863\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1863\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1863","id":806171311,"node_id":"MDU6SXNzdWU4MDYxNzEzMTE=","number":1863,"title":"Add WikiCREM","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-11T08:16:00Z","updated_at":"2021-03-07T07:27:13Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** WikiCREM\r\n- **Description:** A large unsupervised corpus for coreference resolution.\r\n- **Paper:** https:\/\/arxiv.org\/abs\/1905.06290\r\n- **Github repo:**: https:\/\/github.com\/vid-koci\/bert-commonsense\r\n- **Data:** https:\/\/ora.ox.ac.uk\/objects\/uuid:c83e94bb-7584-41a1-aef9-85b0e764d9e3\r\n- **Motivation:** Coreference resolution, common sense reasoning\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1863\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1863\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1862","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1862\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1862\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1862\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1862","id":805722293,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcxMjc2ODAx","number":1862,"title":"Fix writing GPU Faiss index","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-10T17:32:03Z","updated_at":"2021-02-10T18:17:48Z","closed_at":"2021-02-10T18:17:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1862","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1862","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1862.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1862.patch","merged_at":"2021-02-10T18:17:47Z"},"body":"As reported in by @corticalstack there is currently an error when we try to save a faiss index on GPU.\r\n\r\nI fixed that by checking the index `getDevice()` method before calling `index_gpu_to_cpu`\r\n\r\nClose #1859 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1862\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1862\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1861","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1861\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1861\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1861\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1861","id":805631215,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcxMjAwNjA1","number":1861,"title":"Fix Limit 
url","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-10T15:44:56Z","updated_at":"2021-02-10T16:15:00Z","closed_at":"2021-02-10T16:14:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1861","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1861","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1861.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1861.patch","merged_at":"2021-02-10T16:14:58Z"},"body":"The test.json file of the Literal-Motion-in-Text (LiMiT) dataset was removed recently on the master branch of the repo at https:\/\/github.com\/ilmgut\/limit_dataset\r\n\r\nThis PR uses the previous commit sha to download the file instead, as suggested by @Paethon\r\n\r\nClose #1836 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1861\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1861\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1860","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1860\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1860\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1860\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1860","id":805510037,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcxMDk4OTIz","number":1860,"title":"Add loading from the Datasets Hub + add relative paths in download 
manager","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-10T13:24:11Z","updated_at":"2021-02-12T19:13:30Z","closed_at":"2021-02-12T19:13:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1860","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1860","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1860.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1860.patch","merged_at":"2021-02-12T19:13:29Z"},"body":"With the new Datasets Hub on huggingface.co it's now possible to have a dataset repo with your own script and data.\r\nFor example: https:\/\/huggingface.co\/datasets\/lhoestq\/custom_squad\/tree\/main contains one script and two json files.\r\n\r\nYou can load it using\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nd = load_dataset(\"lhoestq\/custom_squad\")\r\n```\r\n\r\nTo be able to use the data files that live right next to the dataset script on the repo in the hub, I added relative paths support for the DownloadManager. For example in the repo mentioned above, there are two json files that can be downloaded via\r\n```python\r\n_URLS = {\r\n \"train\": \"train-v1.1.json\",\r\n \"dev\": \"dev-v1.1.json\",\r\n}\r\ndownloaded_files = dl_manager.download_and_extract(_URLS)\r\n```\r\n\r\nTo make it work, I set the `base_path` of the DownloadManager to be the parent path of the dataset script (which comes from either a local path or a remote url).\r\n\r\nI also had to add the auth header of the requests to huggingface.co for private datasets repos. 
The token is fetched from [huggingface_hub](https:\/\/github.com\/huggingface\/huggingface_hub).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1860\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1860\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1859","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1859\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1859\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1859\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1859","id":805479025,"node_id":"MDU6SXNzdWU4MDU0NzkwMjU=","number":1859,"title":"Error \"in void don't know how to serialize this type of index\" when saving index to disk when device=0 (GPU)","user":{"login":"corticalstack","id":3995321,"node_id":"MDQ6VXNlcjM5OTUzMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3995321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/corticalstack","html_url":"https:\/\/github.com\/corticalstack","followers_url":"https:\/\/api.github.com\/users\/corticalstack\/followers","following_url":"https:\/\/api.github.com\/users\/corticalstack\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/corticalstack\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/corticalstack\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/corticalstack\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/corticalstack\/orgs","repos_url":"https:\/\/api.github.com\/users\/corticalstack\/repos","events_url":"https:\/\/api.github.com\/users\/corticalstack\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/corticalstack\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-10T12:41:00Z","updated_at":"2021-02-10T18:32:12Z","closed_at":"2021-02-10T18:17:47Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Error serializing faiss index. Error as follows:\r\n\r\n`Error in void faiss::write_index(const faiss::Index*, faiss::IOWriter*) at \/home\/conda\/feedstock_root\/build_artifacts\/faiss-split_1612472484670\/work\/faiss\/impl\/index_write.cpp:453: don't know how to serialize this type of index`\r\n\r\n\r\nNote:\r\n\r\n`torch.cuda.is_available()` reports:\r\n\r\n```\r\nCuda is available\r\ncuda:0\r\n\r\n```\r\n\r\nAdding index, device=0 for GPU.\r\n\r\n`dataset.add_faiss_index(column='embeddings', index_name='idx_embeddings', device=0)`\r\n\r\nHowever, during a quick debug, self.faiss_index has no attr \"device\" when checked in` search.py, method save`, so fails to transform gpu index to cpu index. 
If I add index without device, index is saved OK.\r\n\r\n\r\n```\r\ndef save(self, file: str):\r\n \"\"\"Serialize the FaissIndex on disk\"\"\"\r\n import faiss # noqa: F811\r\n\r\n if (\r\n hasattr(self.faiss_index, \"device\")\r\n and self.faiss_index.device is not None\r\n and self.faiss_index.device > -1\r\n ):\r\n index = faiss.index_gpu_to_cpu(self.faiss_index)\r\n else:\r\n index = self.faiss_index\r\n faiss.write_index(index, file)\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1859\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1859\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1858","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1858\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1858\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1858\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1858","id":805477774,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcxMDcxNzIx","number":1858,"title":"Clean config getenvs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-10T12:39:14Z","updated_at":"2021-02-10T15:52:30Z","closed_at":"2021-02-10T15:52:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1858","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1858","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1858.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1858.patch","merged_at":"2021-02-10T15:52:29Z"},"body":"Following #1848 \r\nRemove double getenv calls and fix one issue with rarfile\r\n\r\ncc @albertvillanova ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1858\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1858\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1857","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1857\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1857\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1857\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1857","id":805391107,"node_id":"MDU6SXNzdWU4MDUzOTExMDc=","number":1857,"title":"Unable to upload \"community provided\" dataset - 400 Client Error","user":{"login":"mwrzalik","id":1376337,"node_id":"MDQ6VXNlcjEzNzYzMzc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1376337?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mwrzalik","html_url":"https:\/\/github.com\/mwrzalik","followers_url":"https:\/\/api.github.com\/users\/mwrzalik\/followers","following_url":"https:\/\/api.github.com\/users\/mwrzalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mwrzalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mwrzalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mwrzalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mwrzalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/mwrzalik\/repos","events_url":"https:\/\/api.github.com\/users\/mwrzalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mwrzalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-10T10:39:01Z","updated_at":"2021-08-03T05:06:13Z","closed_at":"2021-08-03T05:06:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\ni'm trying to a upload a dataset as described [here](https:\/\/huggingface.co\/docs\/datasets\/v1.2.0\/share_dataset.html#sharing-a-community-provided-dataset). This is what happens:\r\n\r\n``` \r\n$ datasets-cli login\r\n$ datasets-cli upload_dataset my_dataset\r\nAbout to upload file \/path\/to\/my_dataset\/dataset_infos.json to S3 under filename my_dataset\/dataset_infos.json and namespace username\r\nAbout to upload file \/path\/to\/my_dataset\/my_dataset.py to S3 under filename my_dataset\/my_dataset.py and namespace username\r\nProceed? [Y\/n] Y\r\nUploading... This might take a while if files are large\r\n400 Client Error: Bad Request for url: https:\/\/huggingface.co\/api\/datasets\/presign\r\nhuggingface.co migrated to a new model hosting system.\r\nYou need to upgrade to transformers v3.5+ to upload new models.\r\nMore info at https:\/\/discuss.hugginface.co or https:\/\/twitter.com\/julien_c. Thank you! 
\r\n```\r\nI'm using the latest releases of datasets and transformers.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1857\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1857\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1856","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1856\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1856\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1856\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1856","id":805360200,"node_id":"MDU6SXNzdWU4MDUzNjAyMDA=","number":1856,"title":"load_dataset(\"amazon_polarity\") NonMatchingChecksumError","user":{"login":"yanxi0830","id":19946372,"node_id":"MDQ6VXNlcjE5OTQ2Mzcy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19946372?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yanxi0830","html_url":"https:\/\/github.com\/yanxi0830","followers_url":"https:\/\/api.github.com\/users\/yanxi0830\/followers","following_url":"https:\/\/api.github.com\/users\/yanxi0830\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yanxi0830\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yanxi0830\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yanxi0830\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yanxi0830\/orgs","repos_url":"https:\/\/api.github.com\/users\/yanxi0830\/repos","events_url":"https:\/\/api.github.com\/users\/yanxi0830\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yanxi0830\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-02-10T10:00:56Z","updated_at":"2021-07-21T12:59:51Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, it seems that loading the amazon_polarity dataset gives a NonMatchingChecksumError.\r\n\r\nTo reproduce:\r\n```\r\nload_dataset(\"amazon_polarity\")\r\n```\r\nThis will give the following error:\r\n```\r\n---------------------------------------------------------------------------\r\nNonMatchingChecksumError Traceback (most recent call last)\r\n in ()\r\n----> 1 dataset = load_dataset(\"amazon_polarity\")\r\n\r\n3 frames\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 37 if len(bad_urls) > 0:\r\n 38 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 39 raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 40 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 41 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source 
files:\r\n['https:\/\/drive.google.com\/u\/0\/uc?id=0Bz8a_Dbh9QhbaW12WVVZS2drcnM&export=download']\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1856\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1856\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1855","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1855\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1855\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1855\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1855","id":805256579,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcwODkzNDY3","number":1855,"title":"Minor fix in the docs","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-10T07:27:43Z","updated_at":"2021-02-10T12:33:09Z","closed_at":"2021-02-10T12:33:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1855","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1855","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1855.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1855.patch","merged_at":"2021-02-10T12:33:09Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1855\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1855\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1854","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1854\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1854\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1854\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1854","id":805204397,"node_id":"MDU6SXNzdWU4MDUyMDQzOTc=","number":1854,"title":"Feature Request: Dataset.add_item","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-02-10T06:06:00Z","updated_at":"2021-04-23T10:01:30Z","closed_at":"2021-04-23T10:01:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to integrate `huggingface\/datasets` functionality into `fairseq`, which requires (afaict) being able to build a dataset through an `add_item` method, such as https:\/\/github.com\/pytorch\/fairseq\/blob\/master\/fairseq\/data\/indexed_dataset.py#L318, as opposed to loading all the text into arrow, and then `dataset.map(binarizer)`.\r\nIs this possible at the moment? Is there an example? 
I'm happy to use raw `pa.Table` but not sure whether it will support uneven length entries.\r\n\r\n### Desired API\r\n\r\n```python\r\nimport numpy as np\r\ntokenized: List[np.NDArray[np.int64]] = [np.array([4,4,2]), np.array([8,6,5,5,2]), np.array([3,3,31,5])\r\n\r\ndef build_dataset_from_tokenized(tokenized: List[np.NDArray[int]]) -> Dataset:\r\n \"\"\"FIXME\"\"\"\r\n dataset = EmptyDataset()\r\n for t in tokenized: dataset.append(t)\r\n return dataset\r\nds = build_dataset_from_tokenized(tokenized)\r\nassert (ds[0] == np.array([4,4,2])).all()\r\n```\r\n\r\n### What I tried\r\ngrep, google for \"add one entry at a time\", \"datasets.append\"\r\n\r\n### Current Code\r\nThis code achieves the same result but doesn't fit into the `add_item` abstraction.\r\n\r\n```python\r\n dataset = load_dataset('text', data_files={'train': 'train.txt'})\r\n tokenizer = RobertaTokenizerFast.from_pretrained('roberta-base', max_length=4096)\r\n def tokenize_function(examples):\r\n ids = tokenizer(examples['text'], return_attention_mask=False)['input_ids']\r\n return {'input_ids': [x[1:] for x in ids]}\r\n ds = dataset.map(tokenize_function, batched=True, num_proc=4, remove_columns=['text'], load_from_cache_file=not overwrite_cache)\r\n\tprint(ds['train'][0]) => np array\r\n```\r\n\r\nThanks in advance!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1854\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1854\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1853","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1853\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1853\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1853\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1853","id":804791166,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcwNTAwMjc4","number":1853,"title":"Configure library root logger at the module 
level","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-09T18:11:12Z","updated_at":"2021-02-10T12:32:34Z","closed_at":"2021-02-10T12:32:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1853","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1853","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1853.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1853.patch","merged_at":"2021-02-10T12:32:34Z"},"body":"Configure library root logger at the datasets.logging module level (singleton-like).\r\n\r\nBy doing it this way:\r\n- we are sure configuration is done only once: module level code is only runned once\r\n- no need of global variable\r\n- no need of threading lock","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1853\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1853\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1852","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1852\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1852\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1852\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1852","id":804633033,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcwMzY3NTU1","number":1852,"title":"Add Arabic Speech Corpus 
","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-09T15:02:26Z","updated_at":"2021-02-11T10:18:55Z","closed_at":"2021-02-11T10:18:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1852","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1852","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1852.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1852.patch","merged_at":"2021-02-11T10:18:54Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1852\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1852\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1851","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1851\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1851\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1851\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1851","id":804523174,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcwMjc2MTk5","number":1851,"title":"set bert_score version 
dependency","user":{"login":"pvl","id":3596,"node_id":"MDQ6VXNlcjM1OTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3596?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pvl","html_url":"https:\/\/github.com\/pvl","followers_url":"https:\/\/api.github.com\/users\/pvl\/followers","following_url":"https:\/\/api.github.com\/users\/pvl\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pvl\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pvl\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pvl\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pvl\/orgs","repos_url":"https:\/\/api.github.com\/users\/pvl\/repos","events_url":"https:\/\/api.github.com\/users\/pvl\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pvl\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-09T12:51:07Z","updated_at":"2021-02-09T14:21:48Z","closed_at":"2021-02-09T14:21:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1851","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1851","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1851.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1851.patch","merged_at":"2021-02-09T14:21:48Z"},"body":"Set the bert_score version in requirements since previous versions of bert_score will fail with datasets (closes #843)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1851\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1851\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1850","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1850\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1850\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1850\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1850","id":804412249,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcwMTg0MDAx","number":1850,"title":"Add cord 19 
dataset","user":{"login":"ggdupont","id":5583410,"node_id":"MDQ6VXNlcjU1ODM0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5583410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ggdupont","html_url":"https:\/\/github.com\/ggdupont","followers_url":"https:\/\/api.github.com\/users\/ggdupont\/followers","following_url":"https:\/\/api.github.com\/users\/ggdupont\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ggdupont\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ggdupont\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ggdupont\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ggdupont\/orgs","repos_url":"https:\/\/api.github.com\/users\/ggdupont\/repos","events_url":"https:\/\/api.github.com\/users\/ggdupont\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ggdupont\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-09T10:22:08Z","updated_at":"2021-02-09T15:16:26Z","closed_at":"2021-02-09T15:16:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1850","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1850","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1850.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1850.patch","merged_at":"2021-02-09T15:16:25Z"},"body":"Initial version only reading the metadata in CSV.\r\n\r\n### Checklist:\r\n- [x] Create the dataset script \/datasets\/my_dataset\/my_dataset.py using the template\r\n- [x] Fill the _DESCRIPTION and _CITATION variables\r\n- [x] Implement _infos(), _split_generators() and _generate_examples()\r\n- [x] Make sure that the BUILDER_CONFIGS class attribute is filled with the different configurations of the dataset and that the BUILDER_CONFIG_CLASS is specified if there is a custom config class.\r\n- [x] Generate the metadata file dataset_infos.json for all configurations\r\n- [x] Generate the dummy data dummy_data.zip files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [x] Add the dataset card README.md using the template and at least fill the tags\r\n- [x] Both tests for the real data and the dummy data pass.\r\n\r\n### Extras:\r\n- [x] add more metadata\r\n- [x] add full text\r\n- [x] add pre-computed document embedding","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1850\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1850\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1849","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1849\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1849\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1849\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1849","id":804292971,"node_id":"MDU6SXNzdWU4MDQyOTI5NzE=","number":1849,"title":"Add 
TIMIT","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-09T07:29:41Z","updated_at":"2021-03-15T05:59:37Z","closed_at":"2021-03-15T05:59:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *TIMIT*\r\n- **Description:** *The TIMIT corpus of read speech has been designed to provide speech data for the acquisition of acoustic-phonetic knowledge and for the development and evaluation of automatic speech recognition systems*\r\n\r\n- **Paper:** *Homepage*: http:\/\/groups.inf.ed.ac.uk\/ami\/corpus\/ \/ *Wikipedia*: https:\/\/en.wikipedia.org\/wiki\/TIMIT\r\n- **Data:** *https:\/\/deepai.org\/dataset\/timit*\r\n- **Motivation:** Important speech dataset\r\n\r\n\r\nIf interested in tackling this issue, feel free to tag @patrickvonplaten\r\n\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1849\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1849\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1848","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1848\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1848\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1848\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1848","id":803826506,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY5Njg5ODU1","number":1848,"title":"Refactoring: Create config 
module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-08T18:43:51Z","updated_at":"2021-02-10T12:29:35Z","closed_at":"2021-02-10T12:29:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1848","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1848","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1848.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1848.patch","merged_at":"2021-02-10T12:29:35Z"},"body":"Refactorize configuration settings into their own module.\r\n\r\nThis could be seen as a Pythonic singleton-like approach. 
Eventually a config instance class might be created.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1848\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1848\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1847","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1847\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1847\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1847\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1847","id":803824694,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY5Njg4NDY0","number":1847,"title":"[Metrics] Add word error metric metric","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-08T18:41:15Z","updated_at":"2021-02-09T17:53:21Z","closed_at":"2021-02-09T17:53:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1847","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1847","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1847.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1847.patch","merged_at":"2021-02-09T17:53:21Z"},"body":"This PR adds the word error rate metric to datasets. \r\nWER: https:\/\/en.wikipedia.org\/wiki\/Word_error_rate\r\nfor speech recognition. WER is the main metric used in ASR. 
\r\n\r\n`jiwer` seems to be a solid library (see https:\/\/github.com\/asteroid-team\/asteroid\/pull\/329#discussion_r525158939)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1847\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1847\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1846","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1846\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1846\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1846\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1846","id":803806380,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY5NjczMzcy","number":1846,"title":"Make DownloadManager downloaded\/extracted paths accessible","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-08T18:14:42Z","updated_at":"2021-02-25T14:10:18Z","closed_at":"2021-02-25T14:10:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1846","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1846","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1846.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1846.patch","merged_at":"2021-02-25T14:10:18Z"},"body":"Make accessible the file paths downloaded\/extracted by DownloadManager.\r\n\r\nClose #1831.\r\n\r\nThe approach:\r\n- I set these paths as DownloadManager attributes: these are DownloadManager's concerns\r\n- To access to these from DatasetBuilder, I set the DownloadManager instance as DatasetBuilder attribute: object composition","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1846\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1846\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1845","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1845\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1845\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1845\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1845","id":803714493,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY5NTk2MTIz","number":1845,"title":"Enable logging propagation and remove logging handler","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-08T16:22:13Z","updated_at":"2021-02-09T14:22:38Z","closed_at":"2021-02-09T14:22:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1845","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1845","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1845.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1845.patch","merged_at":"2021-02-09T14:22:37Z"},"body":"We used to have logging propagation disabled because of this issue: https:\/\/github.com\/tensorflow\/tensorflow\/issues\/26691\r\nBut since it's now fixed we should re-enable it. This is important to keep the default logging behavior for users, and propagation is also needed for pytest fixtures as asked in #1826 \r\n\r\nI also removed the handler that was added since, according to the logging [documentation](https:\/\/docs.python.org\/3\/howto\/logging.html#configuring-logging-for-a-library):\r\n> It is strongly advised that you do not add any handlers other than NullHandler to your library\u2019s loggers. This is because the configuration of handlers is the prerogative of the application developer who uses your library. 
The application developer knows their target audience and what handlers are most appropriate for their application: if you add handlers \u2018under the hood\u2019, you might well interfere with their ability to carry out unit tests and deliver logs which suit their requirements.\r\n\r\nIt could have been useful if we wanted to have a custom formatter for the logging but I think it's more important to keep the logging as default to not interfere with the users' logging management.\r\n\r\nTherefore I also removed the two methods `datasets.logging.enable_default_handler` and `datasets.logging.disable_default_handler`.\r\n\r\ncc @albertvillanova this should let you use capsys\/caplog in pytest\r\ncc @LysandreJik @sgugger if you want to do the same in `transformers`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1845\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1845\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1844","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1844\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1844\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1844\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1844","id":803588125,"node_id":"MDU6SXNzdWU4MDM1ODgxMjU=","number":1844,"title":"Update Open Subtitles corpus with original sentence IDs","user":{"login":"Valahaar","id":19476123,"node_id":"MDQ6VXNlcjE5NDc2MTIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19476123?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Valahaar","html_url":"https:\/\/github.com\/Valahaar","followers_url":"https:\/\/api.github.com\/users\/Valahaar\/followers","following_url":"https:\/\/api.github.com\/users\/Valahaar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Valahaar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Valahaar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Valahaar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Valahaar\/orgs","repos_url":"https:\/\/api.github.com\/users\/Valahaar\/repos","events_url":"https:\/\/api.github.com\/users\/Valahaar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Valahaar\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-02-08T13:55:13Z","updated_at":"2021-02-12T17:38:58Z","closed_at":"2021-02-12T17:38:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi! 
It would be great if you could add the original sentence ids to [Open Subtitles](https:\/\/huggingface.co\/datasets\/open_subtitles).\r\n\r\nI can think of two reasons: first, it's possible to gather sentences for an entire document (the original ids contain media id, subtitle file id and sentence id), therefore somewhat allowing for document-level machine translation (and other document-level stuff which could be cool to have); second, it's possible to have parallel sentences in multiple languages, as they share the same ids across bitexts.\r\n\r\nI think I should tag @abhishekkrthakur as he's the one who added it in the first place.\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1844\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1844\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1843","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1843\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1843\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1843\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1843","id":803565393,"node_id":"MDU6SXNzdWU4MDM1NjUzOTM=","number":1843,"title":"MustC Speech Translation","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":18,"created_at":"2021-02-08T13:27:45Z","updated_at":"2021-05-14T14:53:34Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *IWSLT19*\r\n- **Description:** *The Speech Translation Task addresses the translation of English audio into 
German and Portuguese text.*\r\n- **Hompage:** *https:\/\/sites.google.com\/view\/iwslt-evaluation-2019\/speech-translation*\r\n- **Data:** *https:\/\/sites.google.com\/view\/iwslt-evaluation-2019\/speech-translation* - all data under \"Allowed Training Data\" and \"Development and Evalutaion Data for TED\/How2\"\r\n- **Motivation:** Important speech dataset\r\n\r\nIf interested in tackling this issue, feel free to tag @patrickvonplaten\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1843\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1843\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1842","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1842\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1842\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1842\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1842","id":803563149,"node_id":"MDU6SXNzdWU4MDM1NjMxNDk=","number":1842,"title":"Add AMI Corpus","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-08T13:25:00Z","updated_at":"2021-02-09T07:26:16Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *AMI*\r\n- **Description:** *The AMI Meeting Corpus is a multi-modal data set consisting of 100 hours of meeting recordings. For a gentle introduction to the corpus, see the corpus overview. 
To access the data, follow the directions given there. Around two-thirds of the data has been elicited using a scenario in which the participants play different roles in a design team, taking a design project from kick-off to completion over the course of a day. The rest consists of naturally occurring meetings in a range of domains. Detailed information can be found in the documentation section.*\r\n\r\n- **Paper:** *Homepage*: http:\/\/groups.inf.ed.ac.uk\/ami\/corpus\/\r\n- **Data:** *http:\/\/groups.inf.ed.ac.uk\/ami\/download\/* - Select all cases in 1) and select \"Individual Headsets\" & \"Microphone array\" for 2)\r\n- **Motivation:** Important speech dataset\r\n\r\n\r\nIf interested in tackling this issue, feel free to tag @patrickvonplaten\r\n\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1842\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1842\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1841","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1841\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1841\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1841\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1841","id":803561123,"node_id":"MDU6SXNzdWU4MDM1NjExMjM=","number":1841,"title":"Add ljspeech","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-08T13:22:26Z","updated_at":"2021-03-15T05:59:02Z","closed_at":"2021-03-15T05:59:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *ljspeech*\r\n- **Description:** *This is a public domain speech dataset consisting of 13,100 short audio clips of a single speaker reading passages from 7 non-fiction books. A transcription is provided for each clip. Clips vary in length from 1 to 10 seconds and have a total length of approximately 24 hours.\r\n\r\nThe texts were published between 1884 and 1964, and are in the public domain. The audio was recorded in 2016-17 by the LibriVox project and is also in the public domain.)*\r\n- **Paper:** *Homepage*: https:\/\/keithito.com\/LJ-Speech-Dataset\/\r\n- **Data:** *https:\/\/keithito.com\/LJ-Speech-Dataset\/*\r\n- **Motivation:** Important speech dataset\r\n- **TFDatasets Implementation**: https:\/\/www.tensorflow.org\/datasets\/catalog\/ljspeech\r\nIf interested in tackling this issue, feel free to tag @patrickvonplaten\r\n\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1841\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1841\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1840","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1840\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1840\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1840\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1840","id":803560039,"node_id":"MDU6SXNzdWU4MDM1NjAwMzk=","number":1840,"title":"Add common 
voice","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-02-08T13:21:05Z","updated_at":"2022-01-05T16:19:51Z","closed_at":"2021-03-15T05:56:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *common voice*\r\n- **Description:** *Mozilla Common Voice Dataset*\r\n- **Paper:** Homepage: https:\/\/voice.mozilla.org\/en\/datasets\r\n- **Data:** https:\/\/voice.mozilla.org\/en\/datasets\r\n- **Motivation:** Important speech dataset\r\n- **TFDatasets Implementation**: https:\/\/www.tensorflow.org\/datasets\/catalog\/common_voice\r\nIf interested in tackling this issue, feel free to tag @patrickvonplaten\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1840\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1840\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1839","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1839\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1839\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1839\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1839","id":803559164,"node_id":"MDU6SXNzdWU4MDM1NTkxNjQ=","number":1839,"title":"Add 
Voxforge","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-08T13:19:56Z","updated_at":"2021-02-08T13:28:31Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *voxforge* \r\n- **Description:** *VoxForge is a language classification dataset. It consists of user submitted audio clips submitted to the website. In this release, data from 6 languages is collected - English, Spanish, French, German, Russian, and Italian. Since the website is constantly updated, and for the sake of reproducibility, this release contains only recordings submitted prior to 2020-01-01. 
The samples are split between train, validation and testing so that samples from each speaker belong to exactly one split.*\r\n- **Paper:** *Homepage*: http:\/\/www.voxforge.org\/\r\n- **Data:** *http:\/\/www.voxforge.org\/home\/downloads*\r\n- **Motivation:** Important speech dataset\r\n- **TFDatasets Implementation**: https:\/\/www.tensorflow.org\/datasets\/catalog\/voxforge\r\nIf interested in tackling this issue, feel free to tag @patrickvonplaten\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1839\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1839\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1838","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1838\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1838\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1838\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1838","id":803557521,"node_id":"MDU6SXNzdWU4MDM1NTc1MjE=","number":1838,"title":"Add tedlium","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-08T13:17:52Z","updated_at":"2021-04-09T15:57:41Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *tedlium*\r\n- **Description:** *The TED-LIUM 1-3 corpus is English-language TED talks, with transcriptions, sampled at 16kHz. 
It contains about 118 hours of speech.*\r\n- **Paper:** Homepage: http:\/\/www.openslr.org\/7\/, https:\/\/lium.univ-lemans.fr\/en\/ted-lium2\/ &, https:\/\/www.openslr.org\/51\/\r\n- **Data:** http:\/\/www.openslr.org\/7\/\r\n- **Motivation:** Important speech dataset\r\n- **TFDatasets Implementation**: https:\/\/www.tensorflow.org\/datasets\/catalog\/tedlium\r\nIf interested in tackling this issue, feel free to tag @patrickvonplaten\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1838\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1838\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1837","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1837\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1837\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1837\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1837","id":803555650,"node_id":"MDU6SXNzdWU4MDM1NTU2NTA=","number":1837,"title":"Add VCTK","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-08T13:15:28Z","updated_at":"2021-12-28T15:05:08Z","closed_at":"2021-12-28T15:05:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *VCTK*\r\n- **Description:** *This CSTR VCTK Corpus includes speech data uttered by 110 English speakers with various accents. 
Each speaker reads out about 400 sentences, which were selected from a newspaper, the rainbow passage and an elicitation paragraph used for the speech accent archive.*\r\n- **Paper:** Homepage: https:\/\/datashare.ed.ac.uk\/handle\/10283\/3443\r\n- **Data:** https:\/\/datashare.ed.ac.uk\/handle\/10283\/3443\r\n- **Motivation:** Important speech dataset\r\n- **TFDatasets Implementation**: https:\/\/www.tensorflow.org\/datasets\/catalog\/vctk\r\n\r\nIf interested in tackling this issue, feel free to tag @patrickvonplaten\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1837\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1837\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1836","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1836\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1836\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1836\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1836","id":803531837,"node_id":"MDU6SXNzdWU4MDM1MzE4Mzc=","number":1836,"title":"test.json has been removed from the limit dataset repo (breaks dataset)","user":{"login":"Paethon","id":237550,"node_id":"MDQ6VXNlcjIzNzU1MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/237550?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Paethon","html_url":"https:\/\/github.com\/Paethon","followers_url":"https:\/\/api.github.com\/users\/Paethon\/followers","following_url":"https:\/\/api.github.com\/users\/Paethon\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Paethon\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Paethon\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Paethon\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Paethon\/orgs","repos_url":"https:\/\/api.github.com\/users\/Paethon\/repos","events_url":"https:\/\/api.github.com\/users\/Paethon\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Paethon\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-08T12:45:53Z","updated_at":"2021-02-10T16:14:58Z","closed_at":"2021-02-10T16:14:58Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"https:\/\/github.com\/huggingface\/datasets\/blob\/16042b233dbff2a7585110134e969204c69322c3\/datasets\/limit\/limit.py#L51\r\n\r\nThe URL is not valid anymore since test.json has been removed in master for some reason. 
Directly referencing the last commit works:\r\n\r\n`https:\/\/raw.githubusercontent.com\/ilmgut\/limit_dataset\/0707d3989cd8848f0f11527c77dcf168fefd2b23\/data`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1836\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1836\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1835","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1835\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1835\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1835\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1835","id":803524790,"node_id":"MDU6SXNzdWU4MDM1MjQ3OTA=","number":1835,"title":"Add CHiME4 dataset","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-08T12:36:38Z","updated_at":"2021-02-08T13:13:31Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Chime4\r\n- **Description:** Chime4 is a dataset for automatic speech recognition. It is especially useful for evaluating models in a noisy environment and for multi-channel ASR\r\n- **Paper:** Dataset comes from a channel: http:\/\/spandh.dcs.shef.ac.uk\/chime_challenge\/CHiME4\/ . Results paper: \r\n- **Data:** http:\/\/spandh.dcs.shef.ac.uk\/chime_challenge\/CHiME4\/download.html\r\n- **Motivation:** So far there are very little datasets for speech in `datasets`. 
Only `librispeech_asr` so far.\r\n\r\nIf interested in tackling this issue, feel free to tag @patrickvonplaten\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1835\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1835\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1834","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1834\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1834\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1834\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1834","id":803517094,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY5NDMzNDA4","number":1834,"title":"Fixes base_url of limit dataset","user":{"login":"Paethon","id":237550,"node_id":"MDQ6VXNlcjIzNzU1MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/237550?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Paethon","html_url":"https:\/\/github.com\/Paethon","followers_url":"https:\/\/api.github.com\/users\/Paethon\/followers","following_url":"https:\/\/api.github.com\/users\/Paethon\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Paethon\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Paethon\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Paethon\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Paethon\/orgs","repos_url":"https:\/\/api.github.com\/users\/Paethon\/repos","events_url":"https:\/\/api.github.com\/users\/Paethon\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Paethon\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-08T12:26:35Z","updated_at":"2021-02-08T12:42:50Z","closed_at":"2021-02-08T12:42:50Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1834","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1834","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1834.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1834.patch","merged_at":null},"body":"`test.json` is not available in the master branch of the repository anymore. 
Linking to a specific commit.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1834\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1834\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1833","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1833\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1833\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1833\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1833","id":803120978,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY5MDk5MTUx","number":1833,"title":"Add OSCAR dataset card","user":{"login":"pjox","id":635220,"node_id":"MDQ6VXNlcjYzNTIyMA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/635220?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pjox","html_url":"https:\/\/github.com\/pjox","followers_url":"https:\/\/api.github.com\/users\/pjox\/followers","following_url":"https:\/\/api.github.com\/users\/pjox\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pjox\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pjox\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pjox\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pjox\/orgs","repos_url":"https:\/\/api.github.com\/users\/pjox\/repos","events_url":"https:\/\/api.github.com\/users\/pjox\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pjox\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-02-08T01:39:49Z","updated_at":"2021-02-12T14:09:25Z","closed_at":"2021-02-12T14:08:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1833","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1833","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1833.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1833.patch","merged_at":"2021-02-12T14:08:24Z"},"body":"I added more information and completed the dataset card for OSCAR which was started by @lhoestq in his previous [PR](https:\/\/github.com\/huggingface\/datasets\/pull\/1824).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1833\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1833\/timeline","performed_via_github_app":null} 
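The limit-dataset items above (issue 1836 and PR 1834) both boil down to the same fix: point the script's base URL at a fixed commit of `ilmgut/limit_dataset` instead of `master`, so upstream deletions such as the removed `test.json` cannot break loading. A minimal sketch of that pattern is shown below; the commit hash and repository come from the issue body, while the builder skeleton and the `train.json` file name are illustrative assumptions, not the contents of the actual `limit.py` script.

```python
# Sketch only: pinning a dataset script's download URL to a fixed commit.
import datasets

# Commit hash taken from issue 1836; the file layout is assumed for illustration.
_BASE_URL = (
    "https://raw.githubusercontent.com/ilmgut/limit_dataset/"
    "0707d3989cd8848f0f11527c77dcf168fefd2b23/data"
)
_URLS = {"train": f"{_BASE_URL}/train.json", "test": f"{_BASE_URL}/test.json"}


class Limit(datasets.GeneratorBasedBuilder):
    """Skeleton builder showing only the pinned download step."""

    def _info(self):
        return datasets.DatasetInfo(description="LiMiT dataset (sketch).")

    def _split_generators(self, dl_manager):
        downloaded = dl_manager.download_and_extract(_URLS)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"filepath": downloaded["train"]}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST, gen_kwargs={"filepath": downloaded["test"]}
            ),
        ]

    def _generate_examples(self, filepath):
        # Parsing intentionally omitted; see datasets/limit/limit.py for the real logic.
        raise NotImplementedError
```

Pinning to a commit trades automatic upstream updates for reproducibility, which is usually the right trade-off for scripts that fetch raw files from third-party repositories.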
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1832","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1832\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1832\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1832\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1832","id":802880897,"node_id":"MDU6SXNzdWU4MDI4ODA4OTc=","number":1832,"title":"Looks like nokogumbo is up-to-date now, so this is no longer needed.","user":{"login":"JimmyJim1","id":68724553,"node_id":"MDQ6VXNlcjY4NzI0NTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68724553?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JimmyJim1","html_url":"https:\/\/github.com\/JimmyJim1","followers_url":"https:\/\/api.github.com\/users\/JimmyJim1\/followers","following_url":"https:\/\/api.github.com\/users\/JimmyJim1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JimmyJim1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JimmyJim1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JimmyJim1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JimmyJim1\/orgs","repos_url":"https:\/\/api.github.com\/users\/JimmyJim1\/repos","events_url":"https:\/\/api.github.com\/users\/JimmyJim1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JimmyJim1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-07T06:52:07Z","updated_at":"2021-02-08T17:27:29Z","closed_at":"2021-02-08T17:27:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Looks like nokogumbo is up-to-date now, so this is no longer needed.\n\n__Originally posted by @dependabot in https:\/\/github.com\/discourse\/discourse\/pull\/11373#issuecomment-738993432__","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1832\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1832\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1831","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1831\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1831\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1831\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1831","id":802868854,"node_id":"MDU6SXNzdWU4MDI4Njg4NTQ=","number":1831,"title":"Some question about raw dataset download info in the project 
.","user":{"login":"svjack","id":27874014,"node_id":"MDQ6VXNlcjI3ODc0MDE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27874014?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/svjack","html_url":"https:\/\/github.com\/svjack","followers_url":"https:\/\/api.github.com\/users\/svjack\/followers","following_url":"https:\/\/api.github.com\/users\/svjack\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/svjack\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/svjack\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/svjack\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/svjack\/orgs","repos_url":"https:\/\/api.github.com\/users\/svjack\/repos","events_url":"https:\/\/api.github.com\/users\/svjack\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/svjack\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-02-07T05:33:36Z","updated_at":"2021-02-25T14:10:18Z","closed_at":"2021-02-25T14:10:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi , i review the code in \r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/conll2003\/conll2003.py\r\nin the _split_generators function is the truly logic of download raw 
datasets with dl_manager\r\nand use Conll2003 cls by use import_main_class in load_dataset function\r\nMy question is that , with this logic it seems that i can not have the raw dataset download location\r\nin variable in downloaded_files in _split_generators.\r\nIf someone also want use huggingface datasets as raw dataset downloader,\r\nhow can he retrieve the raw dataset download path from attributes in \r\ndatasets.dataset_dict.DatasetDict ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1831\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1831\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1830","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1830\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1830\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1830\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1830","id":802790075,"node_id":"MDU6SXNzdWU4MDI3OTAwNzU=","number":1830,"title":"using map on loaded Tokenizer 10x - 100x slower than default Tokenizer?","user":{"login":"wumpusman","id":7662740,"node_id":"MDQ6VXNlcjc2NjI3NDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7662740?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/wumpusman","html_url":"https:\/\/github.com\/wumpusman","followers_url":"https:\/\/api.github.com\/users\/wumpusman\/followers","following_url":"https:\/\/api.github.com\/users\/wumpusman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/wumpusman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/wumpusman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/wumpusman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/wumpusman\/orgs","repos_url":"https:\/\/api.github.com\/users\/wumpusman\/repos","events_url":"https:\/\/api.github.com\/users\/wumpusman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/wumpusman\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-02-06T21:00:26Z","updated_at":"2021-02-24T21:56:14Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"This could total relate to me misunderstanding particular call functions, but I added words to a GPT2Tokenizer, and saved it to disk (note I'm only showing snippets but I can share more) and the map function ran much slower: \r\n\r\n````\r\ndef save_tokenizer(original_tokenizer,text,path=\"simpledata\/tokenizer\"):\r\n words_unique = set(text.split(\" \"))\r\n for i in words_unique:\r\n original_tokenizer.add_tokens(i)\r\n original_tokenizer.save_pretrained(path)\r\n\r\ntokenizer2 = GPT2Tokenizer.from_pretrained(os.path.join(experiment_path,experiment_name,\"tokenizer_squad\"))\r\n\r\ntrain_set_baby=Dataset.from_dict({\"text\":[train_set[\"text\"][0][0:50]]})\r\n````\r\n\r\nI then applied the dataset map function on a fairly small set of text:\r\n\r\n```\r\n%%time\r\ntrain_set_baby = train_set_baby.map(lambda 
d:tokenizer2(d[\"text\"]),batched=True)\r\n\r\n```\r\n\r\n\r\nThe run time for train_set_baby.map was 6 seconds, and the batch itself was 2.6 seconds\r\n\r\n**100% 1\/1 [00:02<00:00, 2.60s\/ba] CPU times: user 5.96 s, sys: 36 ms, total: 5.99 s Wall time: 5.99 s**\r\n\r\nIn comparison using (even after adding additional tokens): \r\n`\r\ntokenizer = GPT2TokenizerFast.from_pretrained(\"gpt2\")`\r\n\r\n```\r\n%%time\r\ntrain_set_baby = train_set_baby.map(lambda d:tokenizer2(d[\"text\"]),batched=True)\r\n\r\n```\r\nThe time is \r\n**100% 1\/1 [00:00<00:00, 34.09ba\/s] CPU times: user 68.1 ms, sys: 16 \u00b5s, total: 68.1 ms Wall time: 62.9 ms**\r\n\r\nIt seems this might relate to the tokenizer save or load function, however, the issue appears to come up when I apply the loaded tokenizer to the map function. \r\n\r\nI should also add that playing around with the amount of words I add to the tokenizer before I save it to disk and load it into memory appears to impact the time it takes to run the map function. \r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1830\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1830\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1829","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1829\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1829\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1829\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1829","id":802693600,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY4NzgzNjA5","number":1829,"title":"Add Tweet Eval 
Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-06T12:36:25Z","updated_at":"2021-02-08T13:17:54Z","closed_at":"2021-02-08T13:17:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1829","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1829","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1829.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1829.patch","merged_at":"2021-02-08T13:17:53Z"},"body":"Closes Draft PR #1407. \r\n\r\nNotes:\r\n1. I have excluded `mapping.txt` from the dataset at it only contained the name mappings, which are already present in the ClassLabels.\r\n2. I have also exluded the textual names for the emojis mentioned in the [mapping](https:\/\/github.com\/cardiffnlp\/tweeteval\/blob\/main\/datasets\/emoji\/mapping.txt).\r\n3. I do not understand @abhishekkrthakur's example generator on #1407. 
Maybe he was trying to build up on code from some other dataset.\r\n\r\nRequesting @lhoestq to review.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1829\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1829\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1828","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1828\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1828\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1828\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1828","id":802449234,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY4NTkwNDM2","number":1828,"title":"Add CelebA Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-02-05T20:20:55Z","updated_at":"2021-02-18T14:17:07Z","closed_at":"2021-02-18T14:17:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1828","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1828","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1828.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1828.patch","merged_at":null},"body":"Trying to add CelebA Dataset. \r\nNeed help with testing. Loading examples takes a lot of time so I am unable to generate the `dataset_infos.json` and unable to test. 
Also, need help with creating `dummy_data.zip`.\r\n\r\nAdditionally, trying to load a few examples using `load_dataset('.\/datasets\/celeb_a',split='train[10:20]')` still loads all the examples (doesn't stop at 10).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1828\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1828\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1827","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1827\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1827\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1827\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1827","id":802353974,"node_id":"MDU6SXNzdWU4MDIzNTM5NzQ=","number":1827,"title":"Regarding On-the-fly Data Loading","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-05T17:43:48Z","updated_at":"2021-02-18T13:55:16Z","closed_at":"2021-02-18T13:55:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI was wondering if it is possible to load images\/texts as a batch during the training process, without loading the entire dataset on the RAM at any given point.\r\n\r\nThanks,\r\nGunjan","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1827\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1827\/timeline","performed_via_github_app":null} 
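Issue 1827 above asks whether text or image batches can be pulled in during training without ever holding the full dataset in RAM, and issue 1830 shows how much the choice of tokenizer used inside `map` matters. Because Arrow-backed datasets are memory-mapped from disk, one common pattern is to skip pre-tokenization entirely and tokenize each batch inside a `DataLoader` collate function. The sketch below assumes PyTorch and `transformers` are installed and uses `"imdb"` purely as a stand-in corpus; it is one way to do on-the-fly loading, not the only one.

```python
# Sketch: batch-wise, on-the-fly tokenization over a memory-mapped dataset.
from datasets import load_dataset
from torch.utils.data import DataLoader
from transformers import AutoTokenizer

dataset = load_dataset("imdb", split="train")      # rows stay on disk (memory-mapped Arrow)
tokenizer = AutoTokenizer.from_pretrained("gpt2")  # fast (Rust) tokenizer by default
tokenizer.pad_token = tokenizer.eos_token          # GPT-2 ships without a padding token


def collate(rows):
    # `rows` is a list of plain dicts for the current batch only.
    return tokenizer(
        [row["text"] for row in rows],
        padding=True,
        truncation=True,
        max_length=128,
        return_tensors="pt",
    )


loader = DataLoader(dataset, batch_size=16, shuffle=True, collate_fn=collate)

for batch in loader:
    # batch["input_ids"] / batch["attention_mask"] are ready for a model forward pass.
    break
```

Whether this beats pre-tokenizing with `Dataset.map(..., batched=True)` in wall-clock time depends on the tokenizer and hardware, but the memory behaviour is similar either way: only the requested rows are materialised.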
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1826","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1826\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1826\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1826\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1826","id":802074744,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY4Mjc4OTI2","number":1826,"title":"Print error message with filename when malformed CSV","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-05T11:07:59Z","updated_at":"2021-02-09T17:39:27Z","closed_at":"2021-02-09T17:39:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1826","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1826","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1826.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1826.patch","merged_at":"2021-02-09T17:39:26Z"},"body":"Print error message specifying filename when malformed CSV file.\r\n\r\nClose #1821","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1826\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1826\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1825","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1825\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1825\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1825\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1825","id":802073925,"node_id":"MDU6SXNzdWU4MDIwNzM5MjU=","number":1825,"title":"Datasets library not suitable for huge text 
datasets.","user":{"login":"alexvaca0","id":35173563,"node_id":"MDQ6VXNlcjM1MTczNTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35173563?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexvaca0","html_url":"https:\/\/github.com\/alexvaca0","followers_url":"https:\/\/api.github.com\/users\/alexvaca0\/followers","following_url":"https:\/\/api.github.com\/users\/alexvaca0\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexvaca0\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexvaca0\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexvaca0\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexvaca0\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexvaca0\/repos","events_url":"https:\/\/api.github.com\/users\/alexvaca0\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexvaca0\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-02-05T11:06:50Z","updated_at":"2021-03-30T14:04:01Z","closed_at":"2021-03-16T09:44:00Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI'm trying to use datasets library to load a 187GB dataset of pure text, with the intention of building a Language Model. 
The problem is that from the 187GB it goes to some TB when processed by Datasets. First of all, I think the pre-tokenizing step (with tokenizer.map()) is not really thought for datasets this big, but for fine-tuning datasets, as this process alone takes so much time, usually in expensive machines (due to the need of tpus - gpus) which is not being used for training. It would possibly be more efficient in such cases to tokenize each batch at training time (receive batch - tokenize batch - train with batch), so that the whole time the machine is up it's being used for training. \r\nMoreover, the pyarrow objects created from a 187 GB datasets are huge, I mean, we always receive OOM, or No Space left on device errors when only 10-12% of the dataset has been processed, and only that part occupies 2.1TB in disk, which is so many times the disk usage of the pure text (and this doesn't make sense, as tokenized texts should be lighter than pure texts).\r\n\r\nAny suggestions??","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1825\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1825\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1824","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1824\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1824\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1824\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1824","id":802048281,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY4MjU3MTU3","number":1824,"title":"Add OSCAR dataset card","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-05T10:30:26Z","updated_at":"2021-05-05T18:24:14Z","closed_at":"2021-02-08T11:30:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1824","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1824","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1824.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1824.patch","merged_at":null},"body":"I started adding the dataset card for OSCAR 
!\r\n\r\nFor now it's just basic info for all the different configurations in `Dataset Structure`.\r\nIn particular the Data Splits section tells how may samples there are for each config. The Data Instances section show an example for each config, and it also shows the size in MB. Since the Data Instances section is very long the user has to click to expand the info. I was able to generate it thanks to the tools made by @madlag and @yjernite :D\r\n\r\nCc @pjox could you help me with the other sections ? (Dataset Description, Dataset Creation, Considerations for Using the Data, Additional Information)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1824\/reactions","total_count":4,"+1":2,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1824\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1823","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1823\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1823\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1823\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1823","id":802042181,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY4MjUyMjIx","number":1823,"title":"Add FewRel Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2021-02-05T10:22:03Z","updated_at":"2021-03-01T11:56:20Z","closed_at":"2021-03-01T10:21:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1823","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1823","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1823.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1823.patch","merged_at":"2021-03-01T10:21:39Z"},"body":"Hi,\r\n\r\nThis PR closes this [Card](https:\/\/github.com\/huggingface\/datasets\/projects\/1#card-53285184) and Issue #1757.\r\n\r\nI wasn't sure how to add `pid2name` along with the dataset so I added it as a separate configuration. For each (head, tail, tokens) triplet, I have created one example. 
I have added the dictionary key as `\"relation\"` in the dataset. Additionally, for `pubmed_unsupervised`, I kept `\"relation\":\"\"` in the dictionary.\r\n\r\nPlease recommend better alternatives, if any.\r\n\r\nThanks,\r\nGunjan","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1823\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1823\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1822","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1822\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1822\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1822\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1822","id":802003835,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY4MjIxMzIz","number":1822,"title":"Add Hindi Discourse Analysis Natural Language Inference Dataset","user":{"login":"avinsit123","id":33565881,"node_id":"MDQ6VXNlcjMzNTY1ODgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33565881?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/avinsit123","html_url":"https:\/\/github.com\/avinsit123","followers_url":"https:\/\/api.github.com\/users\/avinsit123\/followers","following_url":"https:\/\/api.github.com\/users\/avinsit123\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/avinsit123\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/avinsit123\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/avinsit123\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/avinsit123\/orgs","repos_url":"https:\/\/api.github.com\/users\/avinsit123\/repos","events_url":"https:\/\/api.github.com\/users\/avinsit123\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/avinsit123\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-05T09:30:54Z","updated_at":"2021-02-15T09:57:39Z","closed_at":"2021-02-15T09:57:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1822","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1822","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1822.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1822.patch","merged_at":"2021-02-15T09:57:39Z"},"body":"# Dataset Card for Hindi Discourse Analysis Dataset\r\n\r\n## Table of Contents\r\n- [Dataset Description](#dataset-description)\r\n - [Dataset Summary](#dataset-summary)\r\n - [Supported Tasks](#supported-tasks-and-leaderboards)\r\n - [Languages](#languages)\r\n- [Dataset Structure](#dataset-structure)\r\n - [Data Instances](#data-instances)\r\n - [Data Fields](#data-fields)\r\n - [Data Splits](#data-splits)\r\n- [Dataset Creation](#dataset-creation)\r\n - [Curation Rationale](#curation-rationale)\r\n - [Source Data](#source-data)\r\n - [Annotations](#annotations)\r\n - [Personal and Sensitive Information](#personal-and-sensitive-information)\r\n- [Considerations for Using the 
Data](#considerations-for-using-the-data)\r\n - [Social Impact of Dataset](#social-impact-of-dataset)\r\n - [Discussion of Biases](#discussion-of-biases)\r\n - [Other Known Limitations](#other-known-limitations)\r\n- [Additional Information](#additional-information)\r\n - [Dataset Curators](#dataset-curators)\r\n - [Licensing Information](#licensing-information)\r\n - [Citation Information](#citation-information)\r\n - [Contributions](#contributions)\r\n\r\n## Dataset Description\r\n\r\n- HomePage : https:\/\/github.com\/midas-research\/hindi-nli-data\r\n- Paper : https:\/\/www.aclweb.org\/anthology\/2020.aacl-main.71\r\n- Point of Contact : https:\/\/github.com\/midas-research\/hindi-nli-data\r\n\r\n### Dataset Summary\r\n\r\n- Dataset for Natural Language Inference in Hindi Language. Hindi Discourse Analysis (HDA) Dataset consists of textual-entailment pairs.\r\n- Each row of the dataset is made up of 4 columns - Premise, Hypothesis, Label and Topic.\r\n- Premise and Hypothesis are written in Hindi while Entailment_Label is in English.\r\n- Entailment_label is of 2 types - entailed and not-entailed.\r\n- Entailed means that the hypothesis can be inferred from the premise, and not-entailed means that it cannot.\r\n- The dataset can be used to train models for Natural Language Inference tasks in Hindi Language.\r\n\r\n### Supported Tasks and Leaderboards\r\n\r\n- Natural Language Inference for Hindi\r\n\r\n### Languages\r\n\r\n- Dataset is in Hindi\r\n\r\n## Dataset Structure\r\n\r\n- Data is structured in TSV format. \r\n- train, test and dev splits are in separate files\r\n\r\n\r\n### Data Instances\r\n\r\nAn example of 'train' looks as follows.\r\n\r\n```\r\n{'hypothesis': '\u092f\u0939 \u090f\u0915 \u0935\u0930\u094d\u0923\u0928\u093e\u0924\u094d\u092e\u0915 \u0915\u0925\u0928 \u0939\u0948\u0964', 'label': 1, 'premise': '\u091c\u0948\u0938\u0947 \u0909\u0938 \u0915\u093e \u0938\u093e\u0930\u093e \u091a\u0947\u0939\u0930\u093e \u0905\u092a\u0928\u093e \u0939\u094b \u0914\u0930 \u0906\u0901\u0916\u0947\u0902 \u0915\u093f\u0938\u0940 \u0926\u0942\u0938\u0930\u0947 \u0915\u0940 \u091c\u094b \u091a\u0947\u0939\u0930\u0947 \u092a\u0930 \u092a\u092a\u094b\u091f\u094b\u0902 \u0915\u0947 \u092a\u0940\u091b\u0947 \u092e\u0939\u0938\u0942\u0930 \u0915\u0930 \u0926\u0940 \u0917\u0908\u0902\u0964', 'topic': 1}\r\n\r\n\r\n```\r\n### Data Fields\r\n\r\n- Each row contains 4 columns - premise, hypothesis, label and topic.\r\n\r\n### Data Splits\r\n\r\n- Train : 31892\r\n- Valid : 9460\r\n- Test : 9970\r\n\r\n## Dataset Creation\r\n\r\n- We employ a recasting technique from Poliak et al. (2018a,b) to convert publicly available Hindi Discourse Analysis classification datasets in Hindi and pose them as TE problems\r\n- In this recasting process, we build template hypotheses for each class in the label taxonomy\r\n- Then, we pair the original annotated sentence with each of the template hypotheses to create TE samples.\r\n- For more information on the recasting process, refer to paper https:\/\/www.aclweb.org\/anthology\/2020.aacl-main.71\r\n\r\n### Source Data\r\n\r\nThe source dataset for the recasting process is the BBC Hindi Headlines Dataset (https:\/\/github.com\/NirantK\/hindi2vec\/releases\/tag\/bbc-hindi-v0.1)\r\n\r\n#### Initial Data Collection and Normalization\r\n\r\n- Initial Data was collected by members of MIDAS Lab from Hindi Websites. 
They crowd-sourced the data annotation process, selected two random stories from the corpus, and had three annotators work on them independently, classifying each sentence based on its discourse mode.\r\n- Please refer to this paper for detailed information: https:\/\/www.aclweb.org\/anthology\/2020.lrec-1.149\/\r\n- The discourse is further classified into \"Argumentative\", \"Descriptive\", \"Dialogic\", \"Informative\" and \"Narrative\" - 5 classes.\r\n\r\n#### Who are the source language producers?\r\n\r\nPlease refer to this paper for detailed information: https:\/\/www.aclweb.org\/anthology\/2020.lrec-1.149\/\r\n\r\n### Annotations\r\n\r\n#### Annotation process\r\n\r\nThe annotation process has been described in the Dataset Creation section.\r\n\r\n#### Who are the annotators?\r\n\r\nAnnotation is done automatically by machine via the corresponding recasting process.\r\n\r\n### Personal and Sensitive Information\r\n\r\nNo personal or sensitive information is mentioned in the dataset.\r\n\r\n## Considerations for Using the Data\r\n\r\nPlease refer to this paper: https:\/\/www.aclweb.org\/anthology\/2020.aacl-main.71\r\n\r\n### Discussion of Biases\r\n\r\nNo known biases exist in the dataset.\r\nPlease refer to this paper: https:\/\/www.aclweb.org\/anthology\/2020.aacl-main.71\r\n\r\n### Other Known Limitations\r\n\r\nNo other known limitations, although the size of the data may not be enough to train large models.\r\n\r\n## Additional Information\r\n\r\nPlease refer to this link: https:\/\/github.com\/midas-research\/hindi-nli-data\r\n\r\n### Dataset Curators\r\n\r\nIt is written in the repo https:\/\/github.com\/midas-research\/hindi-nli-data that\r\n- This corpus can be used freely for research purposes.\r\n- The paper listed below provides details of the creation and use of the corpus. If you use the corpus, then please cite the paper.\r\n- If interested in commercial use of the corpus, send email to midas@iiitd.ac.in.\r\n- If you use the corpus in a product or application, then please credit the authors and Multimodal Digital Media Analysis Lab - Indraprastha Institute of Information Technology, New Delhi appropriately. Also, if you send us an email, we will be thrilled to know about how you have used the corpus.\r\n- Multimodal Digital Media Analysis Lab - Indraprastha Institute of Information Technology, New Delhi, India disclaims any responsibility for the use of the corpus and does not provide technical support. 
However, the contact listed above will be happy to respond to queries and clarifications.\r\n- Rather than redistributing the corpus, please direct interested parties to this page\r\n- Please feel free to send us an email:\r\n - with feedback regarding the corpus.\r\n - with information on how you have used the corpus.\r\n - if interested in having us analyze your data for natural language inference.\r\n - if interested in a collaborative research project.\r\n\r\n\r\n### Licensing Information\r\n\r\nCopyright (C) 2019 Multimodal Digital Media Analysis Lab - Indraprastha Institute of Information Technology, New Delhi (MIDAS, IIIT-Delhi).\r\nPls contact authors for any information on the dataset.\r\n\r\n### Citation Information\r\n\r\n```\r\n @inproceedings{uppal-etal-2020-two,\r\n title = \"Two-Step Classification using Recasted Data for Low Resource Settings\",\r\n author = \"Uppal, Shagun and\r\n Gupta, Vivek and\r\n Swaminathan, Avinash and\r\n Zhang, Haimin and\r\n Mahata, Debanjan and\r\n Gosangi, Rakesh and\r\n Shah, Rajiv Ratn and\r\n Stent, Amanda\",\r\n booktitle = \"Proceedings of the 1st Conference of the Asia-Pacific Chapter of the Association for Computational Linguistics and the 10th International Joint Conference on Natural Language Processing\",\r\n month = dec,\r\n year = \"2020\",\r\n address = \"Suzhou, China\",\r\n publisher = \"Association for Computational Linguistics\",\r\n url = \"https:\/\/www.aclweb.org\/anthology\/2020.aacl-main.71\",\r\n pages = \"706--719\",\r\n abstract = \"An NLP model{'}s ability to reason should be independent of language. Previous works utilize Natural Language Inference (NLI) to understand the reasoning ability of models, mostly focusing on high resource languages like English. To address scarcity of data in low-resource languages such as Hindi, we use data recasting to create NLI datasets for four existing text classification datasets. Through experiments, we show that our recasted dataset is devoid of statistical irregularities and spurious patterns. We further study the consistency in predictions of the textual entailment models and propose a consistency regulariser to remove pairwise-inconsistencies in predictions. We propose a novel two-step classification method which uses textual-entailment predictions for classification task. We further improve the performance by using a joint-objective for classification and textual entailment. 
We therefore highlight the benefits of data recasting and improvements on classification performance using our approach with supporting experimental results.\",\r\n}\r\n```\r\n\r\n### Contributions\r\n\r\nThanks to [@avinsit123](https:\/\/github.com\/avinsit123) for adding this dataset.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1822\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1822\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1821","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1821\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1821\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1821\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1821","id":801747647,"node_id":"MDU6SXNzdWU4MDE3NDc2NDc=","number":1821,"title":"Provide better exception message when one of many files results in an exception","user":{"login":"david-waterworth","id":5028974,"node_id":"MDQ6VXNlcjUwMjg5NzQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5028974?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/david-waterworth","html_url":"https:\/\/github.com\/david-waterworth","followers_url":"https:\/\/api.github.com\/users\/david-waterworth\/followers","following_url":"https:\/\/api.github.com\/users\/david-waterworth\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/david-waterworth\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/david-waterworth\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/david-waterworth\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/david-waterworth\/orgs","repos_url":"https:\/\/api.github.com\/users\/david-waterworth\/repos","events_url":"https:\/\/api.github.com\/users\/david-waterworth\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/david-waterworth\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-05T00:49:03Z","updated_at":"2021-02-09T17:39:27Z","closed_at":"2021-02-09T17:39:27Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I find when I process many files, i.e.\r\n\r\n```\r\ntrain_files = glob.glob('rain*.csv')\r\nvalidation_files = glob.glob(validation*.csv')\r\ndatasets = load_dataset(\"csv\", data_files=dict(train=train_files, validation=validation_files))\r\n```\r\n\r\nI sometimes encounter an error due to one of the files being misformed (i.e. 
no data, or a comma in a field that isn't quoted, etc).\r\n\r\nFor example, this is the tail of an exception which I suspect is due to a stray comma.\r\n\r\n> File \"pandas\/_libs\/parsers.pyx\", line 756, in pandas._libs.parsers.TextReader.read\r\n> File \"pandas\/_libs\/parsers.pyx\", line 783, in pandas._libs.parsers.TextReader._read_low_memory\r\n> File \"pandas\/_libs\/parsers.pyx\", line 827, in pandas._libs.parsers.TextReader._read_rows\r\n> File \"pandas\/_libs\/parsers.pyx\", line 814, in pandas._libs.parsers.TextReader._tokenize_rows\r\n> File \"pandas\/_libs\/parsers.pyx\", line 1951, in pandas._libs.parsers.raise_parser_error\r\n> pandas.errors.ParserError: Error tokenizing data. C error: Expected 2 fields in line 559, saw 3\r\n\r\nIt would be nice if the exception trace contained the name of the file being processed (I have 250 separate files!)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1821\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1821\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1820","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1820\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1820\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1820\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1820","id":801529936,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY3ODI4OTg1","number":1820,"title":"Add metrics usage examples and tests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-04T18:23:50Z","updated_at":"2021-02-05T14:00:01Z","closed_at":"2021-02-05T14:00:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1820","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1820","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1820.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1820.patch","merged_at":"2021-02-05T14:00:00Z"},"body":"All metrics finally have usage examples and proper fast + slow tests :)\r\n\r\nI added examples of usage for every metric, and 
I use doctest to make sure they all work as expected.\r\n\r\nFor \"slow\" metrics such as bert_score or bleurt which require to download + run a transformer model, the download + forward pass are only done in the slow test.\r\nIn the fast test on the other hand, the download + forward pass are monkey patched.\r\n\r\nMetrics that need to be installed from github are not added to setup.py because it prevents uploading the `datasets` package to pypi.\r\nAn additional-test-requirements.txt file is used instead. This file also include `comet` in order to not have to resolve its *impossible* dependencies.\r\n\r\nAlso `comet` is not tested on windows because one of its dependencies (fairseq) can't be installed in the CI for some reason.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1820\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1820\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1819","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1819\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1819\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1819\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1819","id":801448670,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY3NzYyMzI2","number":1819,"title":"Fixed spelling `S3Fileystem` to `S3FileSystem`","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-04T16:36:46Z","updated_at":"2021-02-04T16:52:27Z","closed_at":"2021-02-04T16:52:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1819","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1819","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1819.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1819.patch","merged_at":"2021-02-04T16:52:26Z"},"body":"Fixed documentation spelling errors. 
\r\nWrong `S3Fileystem`\r\nRight `S3FileSystem`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1819\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1819\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1818","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1818\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1818\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1818\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1818","id":800958776,"node_id":"MDU6SXNzdWU4MDA5NTg3NzY=","number":1818,"title":"Loading local dataset raise requests.exceptions.ConnectTimeout","user":{"login":"Alxe1","id":15032072,"node_id":"MDQ6VXNlcjE1MDMyMDcy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15032072?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Alxe1","html_url":"https:\/\/github.com\/Alxe1","followers_url":"https:\/\/api.github.com\/users\/Alxe1\/followers","following_url":"https:\/\/api.github.com\/users\/Alxe1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Alxe1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Alxe1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Alxe1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Alxe1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Alxe1\/repos","events_url":"https:\/\/api.github.com\/users\/Alxe1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Alxe1\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-04T05:55:23Z","updated_at":"2021-02-05T13:23:35Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Load local dataset:\r\n```\r\ndataset = load_dataset('json', data_files=[\"..\/..\/data\/json.json\"])\r\ntrain = dataset[\"train\"]\r\nprint(train.features)\r\ntrain1 = train.map(lambda x: {\"labels\": 1})\r\nprint(train1[:2])\r\n```\r\n\r\nbut it raised requests.exceptions.ConnectTimeout:\r\n\r\n```\r\n\/Users\/littlely\/myvirtual\/tf2\/bin\/python3.7 \/Users\/littlely\/projects\/python_projects\/pytorch_learning\/nlp\/dataset\/transformers_datasets.py\r\nTraceback (most recent call last):\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/urllib3\/connection.py\", line 160, in _new_conn\r\n (self._dns_host, self.port), self.timeout, **extra_kw\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/urllib3\/util\/connection.py\", line 84, in create_connection\r\n raise err\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/urllib3\/util\/connection.py\", line 74, in create_connection\r\n sock.connect(sa)\r\nsocket.timeout: timed out\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/urllib3\/connectionpool.py\", line 677, in urlopen\r\n chunked=chunked,\r\n File 
\"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/urllib3\/connectionpool.py\", line 381, in _make_request\r\n self._validate_conn(conn)\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/urllib3\/connectionpool.py\", line 978, in _validate_conn\r\n conn.connect()\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/urllib3\/connection.py\", line 309, in connect\r\n conn = self._new_conn()\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/urllib3\/connection.py\", line 167, in _new_conn\r\n % (self.host, self.timeout),\r\nurllib3.exceptions.ConnectTimeoutError: (, 'Connection to s3.amazonaws.com timed out. (connect timeout=10)')\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/requests\/adapters.py\", line 449, in send\r\n timeout=timeout\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/urllib3\/connectionpool.py\", line 727, in urlopen\r\n method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/urllib3\/util\/retry.py\", line 439, in increment\r\n raise MaxRetryError(_pool, url, error or ResponseError(cause))\r\nurllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='s3.amazonaws.com', port=443): Max retries exceeded with url: \/datasets.huggingface.co\/datasets\/datasets\/json\/json.py (Caused by ConnectTimeoutError(, 'Connection to s3.amazonaws.com timed out. (connect timeout=10)'))\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\/Users\/littlely\/projects\/python_projects\/pytorch_learning\/nlp\/dataset\/transformers_datasets.py\", line 12, in \r\n dataset = load_dataset('json', data_files=[\"..\/..\/data\/json.json\"])\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 591, in load_dataset\r\n path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 263, in prepare_module\r\n head_hf_s3(path, filename=name, dataset=dataset, max_retries=download_config.max_retries)\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 232, in head_hf_s3\r\n max_retries=max_retries,\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 523, in http_head\r\n max_retries=max_retries,\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 458, in _request_with_retry\r\n raise err\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 454, in _request_with_retry\r\n response = requests.request(verb.upper(), url, **params)\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/requests\/api.py\", line 61, in request\r\n return session.request(method=method, url=url, **kwargs)\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/requests\/sessions.py\", line 530, in request\r\n resp = self.send(prep, **send_kwargs)\r\n File 
\"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/requests\/sessions.py\", line 643, in send\r\n r = adapter.send(request, **kwargs)\r\n File \"\/Users\/littlely\/myvirtual\/tf2\/lib\/python3.7\/site-packages\/requests\/adapters.py\", line 504, in send\r\n raise ConnectTimeout(e, request=request)\r\nrequests.exceptions.ConnectTimeout: HTTPSConnectionPool(host='s3.amazonaws.com', port=443): Max retries exceeded with url: \/datasets.huggingface.co\/datasets\/datasets\/json\/json.py (Caused by ConnectTimeoutError(, 'Connection to s3.amazonaws.com timed out. (connect timeout=10)'))\r\n\r\nProcess finished with exit code 1\r\n\r\n```\r\n\r\nWhy it want to connect a remote url when I load local datasets, and how can I fix it?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1818\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1818\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1817","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1817\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1817\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1817\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1817","id":800870652,"node_id":"MDU6SXNzdWU4MDA4NzA2NTI=","number":1817,"title":"pyarrow.lib.ArrowInvalid: Column 1 named input_ids expected length 599 but got length 1500","user":{"login":"LuCeHe","id":9610770,"node_id":"MDQ6VXNlcjk2MTA3NzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9610770?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LuCeHe","html_url":"https:\/\/github.com\/LuCeHe","followers_url":"https:\/\/api.github.com\/users\/LuCeHe\/followers","following_url":"https:\/\/api.github.com\/users\/LuCeHe\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LuCeHe\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LuCeHe\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LuCeHe\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LuCeHe\/orgs","repos_url":"https:\/\/api.github.com\/users\/LuCeHe\/repos","events_url":"https:\/\/api.github.com\/users\/LuCeHe\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LuCeHe\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-04T02:30:23Z","updated_at":"2021-02-07T14:04:24Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to preprocess any dataset in this package with GPT-2 tokenizer, so I need to structure the datasets as long sequences of text without padding. I've been following a couple of your tutorials and here you can find the script that is failing right at the end\r\n\r\nhttps:\/\/github.com\/LuCeHe\/GenericTools\/blob\/master\/KerasTools\/lm_preprocessing.py\r\n\r\nIn the last iteration of the last dset.map, it gives the error that I copied in the title. 
Another issue that I have, if I leave the batch_size set as 1000 in the last .map, I'm afraid it's going to lose most text, so I'm considering setting both writer_batch_size and batch_size to 300 K, but I'm not sure it's the best way to go.\r\n\r\nCan you help me?\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1817\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1817\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1816","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1816\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1816\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1816\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1816","id":800660995,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY3MTExMjEx","number":1816,"title":"Doc2dial rc update to latest version","user":{"login":"songfeng","id":2062185,"node_id":"MDQ6VXNlcjIwNjIxODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2062185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/songfeng","html_url":"https:\/\/github.com\/songfeng","followers_url":"https:\/\/api.github.com\/users\/songfeng\/followers","following_url":"https:\/\/api.github.com\/users\/songfeng\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/songfeng\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/songfeng\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/songfeng\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/songfeng\/orgs","repos_url":"https:\/\/api.github.com\/users\/songfeng\/repos","events_url":"https:\/\/api.github.com\/users\/songfeng\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/songfeng\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-03T20:08:54Z","updated_at":"2021-02-15T15:15:24Z","closed_at":"2021-02-15T15:04:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1816","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1816","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1816.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1816.patch","merged_at":"2021-02-15T15:04:33Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1816\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1816\/timeline","performed_via_github_app":null} 
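For the `ArrowInvalid: Column 1 named input_ids expected length 599 but got length 1500` error discussed in issue 1817 above, a common cause is a batched `map` that returns a different number of rows than it received while the original columns are kept. Below is a minimal sketch of the usual fix, assuming the goal is fixed-length GPT-2 blocks; the `wikitext` dataset and the `text` column are placeholders, not taken from the issue's own script.

```python
# Hypothetical sketch: tokenize and regroup into fixed-size blocks, dropping the
# original columns so the number of output rows may differ from the input rows.
from datasets import load_dataset
from transformers import GPT2TokenizerFast

tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")
block_size = 1024

def tokenize_and_chunk(batch):
    # Concatenate all tokenized texts in the batch, then split into block_size chunks.
    ids = sum(tokenizer(batch["text"])["input_ids"], [])
    total = (len(ids) // block_size) * block_size
    return {"input_ids": [ids[i : i + block_size] for i in range(0, total, block_size)]}

raw = load_dataset("wikitext", "wikitext-2-raw-v1", split="train")
lm_dataset = raw.map(
    tokenize_and_chunk,
    batched=True,
    remove_columns=raw.column_names,  # without this, the old columns keep their original length
)
```

Without `remove_columns`, the new `input_ids` column and the untouched original columns end up with different lengths, which is exactly the pyarrow error reported above.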
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1815","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1815\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1815\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1815\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1815","id":800610017,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY3MDY3NjU1","number":1815,"title":"Add CCAligned Multilingual Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-02-03T18:59:52Z","updated_at":"2021-03-01T12:33:03Z","closed_at":"2021-03-01T10:36:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1815","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1815","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1815.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1815.patch","merged_at":"2021-03-01T10:36:21Z"},"body":"Hello,\r\n\r\nI'm trying to add [CCAligned Multilingual Dataset](http:\/\/www.statmt.org\/cc-aligned\/). This has the potential to close #1756.\r\n\r\nThis dataset has two types - Document-Pairs, and Sentence-Pairs.\r\n\r\nThe datasets are huge, so I won't be able to test all of them. At the same time, a user might only want to download one particular language and not all. To provide this feature, `load_dataset`'s `**config_kwargs` should allow some random keyword args, in this case -`language_code`. This will be needed before the dataset is downloaded and extracted.\r\n\r\nI'm expecting the usage to be something like - \r\n`load_dataset('ccaligned_multilingual','documents',language_code='en_XX-af_ZA')`. Ofcourse, at a later stage we can provide just two character language codes. This also has an issue where one language has multiple files (`my_MM` and `my_MM_zaw` on the link), but before that the required functionality must be added to `load_dataset`.\r\n\r\nIt would be great if someone could either tell me an alternative way to do this, or point me to where changes need to be made, if any, apart from the `BuilderConfig` definition. 
\r\n\r\nAdditionally, I believe the tests will also have to be modified if this change is made, since it would not be possible to test for any random keyword arguments. \r\n\r\nA decent way to go about this would be to provide all the options in a list\/dictionary for `language_code` and use that to test the arguments. In essence, this is similar to the pre-trained checkpoint dictionary as `transformers`. That means writing dataset specific tests, or adding something new to dataset generation script to make it easier for everyone to add keyword arguments without having to worry about the tests.\r\n\r\nThanks,\r\nGunjan\r\n\r\nRequesting @lhoestq \/ @yjernite to review.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1815\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1815\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1814","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1814\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1814\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1814\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1814","id":800516236,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY2OTg4NTI1","number":1814,"title":"Add Freebase QA Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-03T16:57:49Z","updated_at":"2021-02-04T19:47:51Z","closed_at":"2021-02-04T16:21:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1814","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1814","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1814.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1814.patch","merged_at":"2021-02-04T16:21:48Z"},"body":"Closes PR #1435. 
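For the `**config_kwargs` question in the CCAligned PR above, keyword arguments passed to `load_dataset` are forwarded to the builder's `BuilderConfig`, so a config class with a custom attribute is one way to support per-language downloads. A minimal sketch under that assumption follows; the class name, `language_code` attribute and download URL are illustrative, not the final script.

```python
# Hypothetical sketch of a dataset script whose config accepts a custom keyword
# argument; load_dataset("ccaligned_multilingual", "documents", language_code=...)
# would forward language_code to this config.
import datasets


class CCAlignedConfig(datasets.BuilderConfig):
    def __init__(self, language_code=None, **kwargs):
        super().__init__(**kwargs)
        self.language_code = language_code


class CCAlignedMultilingual(datasets.GeneratorBasedBuilder):
    BUILDER_CONFIG_CLASS = CCAlignedConfig
    BUILDER_CONFIGS = [
        CCAlignedConfig(name="documents", version=datasets.Version("1.0.0")),
        CCAlignedConfig(name="sentences", version=datasets.Version("1.0.0")),
    ]

    def _info(self):
        return datasets.DatasetInfo(
            features=datasets.Features(
                {"source": datasets.Value("string"), "target": datasets.Value("string")}
            )
        )

    def _split_generators(self, dl_manager):
        # The custom kwarg is available as self.config.language_code;
        # the URL below is a placeholder, not the real CC-Aligned layout.
        url = f"http://www.statmt.org/cc-aligned/{self.config.language_code}.tsv.xz"
        path = dl_manager.download_and_extract(url)
        return [datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"path": path})]

    def _generate_examples(self, path):
        with open(path, encoding="utf-8") as f:
            for idx, line in enumerate(f):
                parts = line.rstrip("\r\n").split("\t")
                if len(parts) < 2:
                    continue
                yield idx, {"source": parts[0], "target": parts[1]}
```

With such a config class, the call quoted in the PR, `load_dataset('ccaligned_multilingual','documents',language_code='en_XX-af_ZA')`, reaches `_split_generators` with `self.config.language_code` already set.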
Fixed issues with PR #1809.\r\n\r\nRequesting @lhoestq to review.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1814\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1814\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1813","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1813\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1813\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1813\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1813","id":800435973,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY2OTIxNDcz","number":1813,"title":"Support future datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-03T15:26:49Z","updated_at":"2021-02-05T10:33:48Z","closed_at":"2021-02-05T10:33:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1813","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1813","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1813.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1813.patch","merged_at":"2021-02-05T10:33:47Z"},"body":"If a dataset is available at the version of the local installation of `datasets` (e.g. 
1.2.0), then loading this dataset means loading the script at this version.\r\n\r\nHowever when trying to load a dataset that is only available on master, currently users have to specify `script_version=\"master\"` in `load_dataset` to make it work.\r\n\r\nHowever we could automatically get the dataset from master instead in this case.\r\n\r\nI added this feature in this PR.\r\nI also added a warning if a dataset is not available at the version of the local installation of `datasets` but is loaded from master:\r\n```python\r\n>>> load_dataset(\"silicone\", \"dyda_da\")\r\nCouldn't find file locally at silicone\/silicone.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.2.0\/datasets\/silicone\/silicone.py.\r\nThe file was picked from the master branch on github instead at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/silicone\/silicone.py.\r\nDownloading and preparing dataset silicone\/dyda_da (download: 8.46 MiB, generated: 9.39 MiB, post-processed: Unknown size, total: 17.86 MiB) to \/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/silicone\/dyda_da\/1.0.0\/d41d8c0b73c6df035b1369c45774418f0051163ea689b5502b8bda783adf6342...\r\n...\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1813\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1813\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1812","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1812\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1812\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1812\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1812","id":799379178,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY2MDMxODIy","number":1812,"title":"Add CIFAR-100 
Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-02T15:22:59Z","updated_at":"2021-02-08T11:10:18Z","closed_at":"2021-02-08T10:39:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1812","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1812","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1812.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1812.patch","merged_at":"2021-02-08T10:39:06Z"},"body":"Adding CIFAR-100 Dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1812\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1812\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1811","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1811\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1811\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1811\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1811","id":799211060,"node_id":"MDU6SXNzdWU3OTkyMTEwNjA=","number":1811,"title":"Unable to add Multi-label 
Datasets","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-02T11:50:56Z","updated_at":"2021-02-18T14:16:31Z","closed_at":"2021-02-18T14:16:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to add [CIFAR-100](https:\/\/www.cs.toronto.edu\/~kriz\/cifar.html) dataset. The dataset contains two labels per image - `fine label` and `coarse label`. Using just one label in supervised keys as \r\n`supervised_keys=(\"img\", \"fine_label\")` raises no issue. But trying `supervised_keys=(\"img\", \"fine_label\",\"coarse_label\")` leads to this error : \r\n\r\n```python\r\nTraceback (most recent call last):\r\n File \"test_script.py\", line 2, in \r\n d = load_dataset('.\/datasets\/cifar100')\r\n File \"~\/datasets\/src\/datasets\/load.py\", line 668, in load_dataset\r\n **config_kwargs,\r\n File \"~\/datasets\/src\/datasets\/builder.py\", line 896, in __init__\r\n super(GeneratorBasedBuilder, self).__init__(*args, **kwargs)\r\n File \"~\/datasets\/src\/datasets\/builder.py\", line 247, in __init__\r\n info.update(self._info())\r\n File \"~\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/cifar100\/61d2489b2d4a4abc34201432541b7380984ec714e290817d9a1ee318e4b74e0f\/cifar100.py\", line 79, in _info\r\n citation=_CITATION,\r\n File \"\", line 19, in __init__\r\n File \"~\/datasets\/src\/datasets\/info.py\", line 136, in __post_init__\r\n self.supervised_keys = SupervisedKeysData(*self.supervised_keys)\r\nTypeError: __init__() takes from 1 to 3 positional arguments but 4 were given\r\n```\r\nIs there a way I can fix this?\r\n\r\nAlso, what does adding `supervised_keys` do? Is it necessary? 
How would I specify `supervised_keys` for a multi-input, multi-label dataset?\r\n\r\nThanks,\r\nGunjan","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1811\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1811\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1810","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1810\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1810\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1810\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1810","id":799168650,"node_id":"MDU6SXNzdWU3OTkxNjg2NTA=","number":1810,"title":"Add Hateful Memes Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"},{"id":3608941089,"node_id":"LA_kwDODunzps7XHBIh","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/vision","name":"vision","color":"bfdadc","default":false,"description":"Vision datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-02T10:53:59Z","updated_at":"2021-12-08T12:03:59Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Add Hateful Memes Dataset\r\n- **Name:** Hateful Memes\r\n- **Description:** [https:\/\/ai.facebook.com\/blog\/hateful-memes-challenge-and-data-set]( https:\/\/ai.facebook.com\/blog\/hateful-memes-challenge-and-data-set)\r\n- **Paper:** [https:\/\/arxiv.org\/pdf\/2005.04790.pdf](https:\/\/arxiv.org\/pdf\/2005.04790.pdf)\r\n- **Data:** [This link](https:\/\/drivendata-competition-fb-hateful-memes-data.s3.amazonaws.com\/XjiOc5ycDBRRNwbhRlgH.zip?AWSAccessKeyId=AKIARVBOBDCY4MWEDJKS&Signature=DaUuGgZWUgDHzEPPbyJ2PhSJ56Q%3D&Expires=1612816874)\r\n- **Motivation:** Including multi-modal datasets to \ud83e\udd17 datasets.\r\n\r\nI will be adding this dataset. It requires the user to sign an agreement on DrivenData. 
So, it will be used with a manual download.\r\n\r\nThe issue with this dataset is that the images are of different sizes. The image datasets added so far (CIFAR-10 and MNIST) have a uniform shape throughout.\r\nSo something like \r\n```python\r\n datasets.Array2D(shape=(28, 28), dtype=\"uint8\")\r\n```\r\nwon't work for the images. How would I add image features then? I checked `datasets\/features.py` but couldn't figure out the appropriate class for this. I'm assuming I would want to avoid re-sizing at all since we want the user to be able to access the original images.\r\n\r\nAlso, in case I want to load only a subset of the data, since the actual data is around 8.8GB, how would that be possible?\r\n\r\nThanks,\r\nGunjan","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1810\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1810\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1809","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1809\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1809\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1809\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1809","id":799059141,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY1NzY4ODQz","number":1809,"title":"Add FreebaseQA dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-02-02T08:35:53Z","updated_at":"2021-02-03T17:15:05Z","closed_at":"2021-02-03T16:43:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1809","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1809","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1809.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1809.patch","merged_at":null},"body":"Adding FreebaseQA dataset suggested in PR #1435 with minor edits. 
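Regarding the `supervised_keys` question (issue 1811) and the variable-size image question (issue 1810) above: `supervised_keys` only describes a single (input, target) pair, so for multi-label data it can simply be omitted and both labels declared as ordinary features; and newer releases of `datasets` ship an `Image` feature that stores images without forcing a fixed shape. A minimal sketch under those assumptions (the label names are placeholders):

```python
# Hypothetical sketch: two label columns declared as plain features (no
# supervised_keys), plus the Image feature available in newer datasets releases.
import datasets

features = datasets.Features(
    {
        "img": datasets.Image(),  # handles images of different sizes without resizing
        "fine_label": datasets.ClassLabel(names=[f"fine_{i}" for i in range(100)]),
        "coarse_label": datasets.ClassLabel(names=[f"coarse_{i}" for i in range(20)]),
    }
)

info = datasets.DatasetInfo(
    description="Example info for a dataset with two label columns",
    features=features,
    # supervised_keys takes a single (input, output) pair, so it is omitted here.
)
```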
Also closes that PR.\r\nRequesting @lhoestq to review.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1809\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1809\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1808","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1808\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1808\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1808\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1808","id":798879180,"node_id":"MDU6SXNzdWU3OTg4NzkxODA=","number":1808,"title":"writing Datasets in a human readable format","user":{"login":"ghost","id":10137,"node_id":"MDQ6VXNlcjEwMTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghost","html_url":"https:\/\/github.com\/ghost","followers_url":"https:\/\/api.github.com\/users\/ghost\/followers","following_url":"https:\/\/api.github.com\/users\/ghost\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghost\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghost\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghost\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghost\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghost\/repos","events_url":"https:\/\/api.github.com\/users\/ghost\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghost\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-02T02:55:40Z","updated_at":"2021-02-02T15:04:08Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI see there is a save_to_disk function to save data, but this is not human readable format, is there a way I could save a Dataset object in a human readable format to a file like json? 
thanks @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1808\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1808\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1807","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1807\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1807\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1807\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1807","id":798823591,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY1NTczNzU5","number":1807,"title":"Adding an aggregated dataset for the GEM benchmark","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-02T00:39:53Z","updated_at":"2021-02-02T22:48:41Z","closed_at":"2021-02-02T18:06:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1807","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1807","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1807.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1807.patch","merged_at":"2021-02-02T18:06:58Z"},"body":"This dataset gathers modified versions of several other conditional text generation datasets which together make up the shared task for the Generation Evaluation and Metrics workshop (think GLUE for text generation)\r\n\r\nThe changes from the original datasets are detailed in the Dataset Cards on the GEM website, which are linked to in this dataset card.\r\n\r\ncc @sebastianGehrmann\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1807\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1807\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1806","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1806\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1806\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1806\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1806","id":798607869,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY1Mzk0ODIz","number":1806,"title":"Update details to MLSUM dataset","user":{"login":"padipadou","id":15138872,"node_id":"MDQ6VXNlcjE1MTM4ODcy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15138872?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/padipadou","html_url":"https:\/\/github.com\/padipadou","followers_url":"https:\/\/api.github.com\/users\/padipadou\/followers","following_url":"https:\/\/api.github.com\/users\/padipadou\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/padipadou\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/padipadou\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/padipadou\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/padipadou\/orgs","repos_url":"https:\/\/api.github.com\/users\/padipadou\/repos","events_url":"https:\/\/api.github.com\/users\/padipadou\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/padipadou\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-01T18:35:12Z","updated_at":"2021-02-01T18:46:28Z","closed_at":"2021-02-01T18:46:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1806","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1806","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1806.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1806.patch","merged_at":"2021-02-01T18:46:21Z"},"body":"Update details to MLSUM dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1806\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1806\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1805","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1805\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1805\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1805\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1805","id":798498053,"node_id":"MDU6SXNzdWU3OTg0OTgwNTM=","number":1805,"title":"can't pickle SwigPyObject objects when calling dataset.get_nearest_examples from FAISS 
index","user":{"login":"abarbosa94","id":6608232,"node_id":"MDQ6VXNlcjY2MDgyMzI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6608232?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abarbosa94","html_url":"https:\/\/github.com\/abarbosa94","followers_url":"https:\/\/api.github.com\/users\/abarbosa94\/followers","following_url":"https:\/\/api.github.com\/users\/abarbosa94\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abarbosa94\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abarbosa94\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abarbosa94\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abarbosa94\/orgs","repos_url":"https:\/\/api.github.com\/users\/abarbosa94\/repos","events_url":"https:\/\/api.github.com\/users\/abarbosa94\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abarbosa94\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-01T16:14:17Z","updated_at":"2021-03-06T14:32:46Z","closed_at":"2021-03-06T14:32:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"So, I have the following instances in my dataset\r\n\r\n```\r\n{'question': 'An astronomer observes that a planet rotates faster after a meteorite impact. Which is the most likely effect of \r\nthis increase in rotation?', \r\n'answer': 'C', \r\n'example_id': 'ARCCH_Mercury_7175875', \r\n'options':[{'option_context': 'One effect of increased amperage in the planetary world (..)', 'option_id': 'A', 'option_text': 'Planetary density will decrease.'},\r\n (...)]}\r\n```\r\n\r\nThe `options` value is always an list with 4 options, each one is a dict with `option_context`; `option_id` and `option_text`.\r\n\r\nI would like to overwrite the `option_context` of each instance of my dataset for a dpr result that I am developing. 
Then, I trained a model already and save it in a FAISS index\r\n```\r\ndpr_dataset = load_dataset(\r\n \"text\",\r\n data_files=ARC_CORPUS_TEXT,\r\n cache_dir=CACHE_DIR,\r\n split=\"train[:100%]\",\r\n )\r\ndpr_dataset.load_faiss_index(\"embeddings\", f\"{ARC_CORPUS_FAISS}\")\r\ntorch.set_grad_enabled(False)\r\n```\r\n\r\nThen, as a processor of my dataset, I created a map function that calls the `dpr_dataset` for each _option_\r\n\r\n```\r\ndef generate_context(example):\r\n question_text = example['question']\r\n for option in example['options']:\r\n question_with_option = question_text + \" \" + option['option_text']\r\n tokenize_text = question_tokenizer(question_with_option, return_tensors=\"pt\").to(device)\r\n question_embed = (\r\n question_encoder(**tokenize_text)\r\n )[0][0].cpu().numpy()\r\n _, retrieved_examples = dpr_dataset.get_nearest_examples(\r\n \"embeddings\", question_embed, k=10\r\n )\r\n # option[\"option_context\"] = retrieved_examples[\"text\"]\r\n # option[\"option_context\"] = \" \".join(option[\"option_context\"]).strip()\r\n #result_dict = {\r\n # 'example_id': example['example_id'],\r\n # 'answer': example['answer'],\r\n # 'question': question_text,\r\n #options': example['options']\r\n # }\r\n return example\r\n```\r\n\r\nI intentionally commented on this portion of the code.\r\n\r\nBut when I call the `map` method, `ds_with_context = dataset.map(generate_context,load_from_cache_file=False)`\r\n\r\nIt calls the following error:\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n----> 1 ds_with_context = dataset.map(generate_context,load_from_cache_file=False)\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py in map(self, function, with_indices, input_columns, batched, batch_size, remove_columns, keep_in_memory, load_from_cache_file, cache_file_names, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc)\r\n 301 num_proc=num_proc,\r\n 302 )\r\n--> 303 for k, dataset in self.items()\r\n 304 }\r\n 305 )\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py in (.0)\r\n 301 num_proc=num_proc,\r\n 302 )\r\n--> 303 for k, dataset in self.items()\r\n 304 }\r\n 305 )\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint)\r\n 1257 fn_kwargs=fn_kwargs,\r\n 1258 new_fingerprint=new_fingerprint,\r\n-> 1259 update_data=update_data,\r\n 1260 )\r\n 1261 else:\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 155 }\r\n 156 # apply actual function\r\n--> 157 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 158 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 159 # re-apply format to the output\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 156 kwargs_for_fingerprint[\"fingerprint_name\"] = fingerprint_name\r\n 157 
kwargs[fingerprint_name] = update_fingerprint(\r\n--> 158 self._fingerprint, transform, kwargs_for_fingerprint\r\n 159 )\r\n 160 \r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in update_fingerprint(fingerprint, transform, transform_args)\r\n 103 for key in sorted(transform_args):\r\n 104 hasher.update(key)\r\n--> 105 hasher.update(transform_args[key])\r\n 106 return hasher.hexdigest()\r\n 107 \r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in update(self, value)\r\n 55 def update(self, value):\r\n 56 self.m.update(f\"=={type(value)}==\".encode(\"utf8\"))\r\n---> 57 self.m.update(self.hash(value).encode(\"utf-8\"))\r\n 58 \r\n 59 def hexdigest(self):\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in hash(cls, value)\r\n 51 return cls.dispatch[type(value)](cls, value)\r\n 52 else:\r\n---> 53 return cls.hash_default(value)\r\n 54 \r\n 55 def update(self, value):\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in hash_default(cls, value)\r\n 44 @classmethod\r\n 45 def hash_default(cls, value):\r\n---> 46 return cls.hash_bytes(dumps(value))\r\n 47 \r\n 48 @classmethod\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py in dumps(obj)\r\n 387 file = StringIO()\r\n 388 with _no_cache_fields(obj):\r\n--> 389 dump(obj, file)\r\n 390 return file.getvalue()\r\n 391 \r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py in dump(obj, file)\r\n 359 def dump(obj, file):\r\n 360 \"\"\"pickle an object to a file\"\"\"\r\n--> 361 Pickler(file, recurse=True).dump(obj)\r\n 362 return\r\n 363 \r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/dill\/_dill.py in dump(self, obj)\r\n 452 raise PicklingError(msg)\r\n 453 else:\r\n--> 454 StockPickler.dump(self, obj)\r\n 455 stack.clear() # clear record of 'recursion-sensitive' pickled objects\r\n 456 return\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in dump(self, obj)\r\n 435 if self.proto >= 4:\r\n 436 self.framer.start_framing()\r\n--> 437 self.save(obj)\r\n 438 self.write(STOP)\r\n 439 self.framer.end_framing()\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit self\r\n 505 return\r\n 506 \r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py in save_function(pickler, obj)\r\n 554 dill._dill._create_function,\r\n 555 (obj.__code__, globs, obj.__name__, obj.__defaults__, obj.__closure__, obj.__dict__, fkwdefaults),\r\n--> 556 obj=obj,\r\n 557 )\r\n 558 else:\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save_reduce(self, func, args, state, listitems, dictitems, obj)\r\n 636 else:\r\n 637 save(func)\r\n--> 638 save(args)\r\n 639 write(REDUCE)\r\n 640 \r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit self\r\n 505 return\r\n 506 \r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save_tuple(self, obj)\r\n 784 write(MARK)\r\n 785 for element in 
obj:\r\n--> 786 save(element)\r\n 787 \r\n 788 if id(obj) in memo:\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit self\r\n 505 return\r\n 506 \r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/dill\/_dill.py in save_module_dict(pickler, obj)\r\n 939 # we only care about session the first pass thru\r\n 940 pickler._session = False\r\n--> 941 StockPickler.save_dict(pickler, obj)\r\n 942 log.info(\"# D2\")\r\n 943 return\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save_dict(self, obj)\r\n 854 \r\n 855 self.memoize(obj)\r\n--> 856 self._batch_setitems(obj.items())\r\n 857 \r\n 858 dispatch[dict] = save_dict\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in _batch_setitems(self, items)\r\n 880 for k, v in tmp:\r\n 881 save(k)\r\n--> 882 save(v)\r\n 883 write(SETITEMS)\r\n 884 elif n:\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 547 \r\n 548 # Save the reduce() output and finally memoize the object\r\n--> 549 self.save_reduce(obj=obj, *rv)\r\n 550 \r\n 551 def persistent_id(self, obj):\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save_reduce(self, func, args, state, listitems, dictitems, obj)\r\n 660 \r\n 661 if state is not None:\r\n--> 662 save(state)\r\n 663 write(BUILD)\r\n 664 \r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit self\r\n 505 return\r\n 506 \r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/dill\/_dill.py in save_module_dict(pickler, obj)\r\n 939 # we only care about session the first pass thru\r\n 940 pickler._session = False\r\n--> 941 StockPickler.save_dict(pickler, obj)\r\n 942 log.info(\"# D2\")\r\n 943 return\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save_dict(self, obj)\r\n 854 \r\n 855 self.memoize(obj)\r\n--> 856 self._batch_setitems(obj.items())\r\n 857 \r\n 858 dispatch[dict] = save_dict\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in _batch_setitems(self, items)\r\n 880 for k, v in tmp:\r\n 881 save(k)\r\n--> 882 save(v)\r\n 883 write(SETITEMS)\r\n 884 elif n:\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit self\r\n 505 return\r\n 506 \r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/dill\/_dill.py in save_module_dict(pickler, obj)\r\n 939 # we only care about session the first pass thru\r\n 940 pickler._session = False\r\n--> 941 StockPickler.save_dict(pickler, obj)\r\n 942 log.info(\"# D2\")\r\n 943 return\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save_dict(self, obj)\r\n 854 \r\n 855 self.memoize(obj)\r\n--> 856 self._batch_setitems(obj.items())\r\n 857 \r\n 858 dispatch[dict] = save_dict\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in _batch_setitems(self, items)\r\n 885 k, v = tmp[0]\r\n 886 save(k)\r\n--> 887 save(v)\r\n 888 write(SETITEM)\r\n 889 # else tmp is empty, and we're done\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 547 \r\n 548 # Save the reduce() output and finally memoize the object\r\n--> 549 self.save_reduce(obj=obj, *rv)\r\n 550 \r\n 551 def persistent_id(self, obj):\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in 
save_reduce(self, func, args, state, listitems, dictitems, obj)\r\n 660 \r\n 661 if state is not None:\r\n--> 662 save(state)\r\n 663 write(BUILD)\r\n 664 \r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit self\r\n 505 return\r\n 506 \r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/dill\/_dill.py in save_module_dict(pickler, obj)\r\n 939 # we only care about session the first pass thru\r\n 940 pickler._session = False\r\n--> 941 StockPickler.save_dict(pickler, obj)\r\n 942 log.info(\"# D2\")\r\n 943 return\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save_dict(self, obj)\r\n 854 \r\n 855 self.memoize(obj)\r\n--> 856 self._batch_setitems(obj.items())\r\n 857 \r\n 858 dispatch[dict] = save_dict\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in _batch_setitems(self, items)\r\n 880 for k, v in tmp:\r\n 881 save(k)\r\n--> 882 save(v)\r\n 883 write(SETITEMS)\r\n 884 elif n:\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 547 \r\n 548 # Save the reduce() output and finally memoize the object\r\n--> 549 self.save_reduce(obj=obj, *rv)\r\n 550 \r\n 551 def persistent_id(self, obj):\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save_reduce(self, func, args, state, listitems, dictitems, obj)\r\n 660 \r\n 661 if state is not None:\r\n--> 662 save(state)\r\n 663 write(BUILD)\r\n 664 \r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit self\r\n 505 return\r\n 506 \r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/dill\/_dill.py in save_module_dict(pickler, obj)\r\n 939 # we only care about session the first pass thru\r\n 940 pickler._session = False\r\n--> 941 StockPickler.save_dict(pickler, obj)\r\n 942 log.info(\"# D2\")\r\n 943 return\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save_dict(self, obj)\r\n 854 \r\n 855 self.memoize(obj)\r\n--> 856 self._batch_setitems(obj.items())\r\n 857 \r\n 858 dispatch[dict] = save_dict\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in _batch_setitems(self, items)\r\n 885 k, v = tmp[0]\r\n 886 save(k)\r\n--> 887 save(v)\r\n 888 write(SETITEM)\r\n 889 # else tmp is empty, and we're done\r\n\r\n\/usr\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 522 reduce = getattr(obj, \"__reduce_ex__\", None)\r\n 523 if reduce is not None:\r\n--> 524 rv = reduce(self.proto)\r\n 525 else:\r\n 526 reduce = getattr(obj, \"__reduce__\", None)\r\n\r\nTypeError: can't pickle SwigPyObject objects\r\n```\r\n\r\nWhich I have no idea how to solve\/deal with it\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1805\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1805\/timeline","performed_via_github_app":null} 
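The traceback in issue 1805 above is raised while `datasets` hashes (pickles) the mapped function together with everything it captures, and the FAISS index attached to `dpr_dataset` is backed by an unpicklable SwigPyObject. A hedged workaround sketch, not the library's official fix: run the retrieval in a plain Python loop first, then pass only picklable data to `map`. Names such as `dataset`, `dpr_dataset`, `question_tokenizer`, `question_encoder` and `device` follow the snippet in the issue and are assumed to be defined; `dataset` is assumed here to be a single split rather than a `DatasetDict`.

```python
import torch

# Precompute retrievals eagerly so nothing unpicklable is captured by map().
retrieved_contexts = []  # plain nested lists of strings, picklable
with torch.no_grad():
    for example in dataset:  # e.g. dataset = full_dataset["train"]
        contexts = []
        for option in example["options"]:
            query = example["question"] + " " + option["option_text"]
            tokens = question_tokenizer(query, return_tensors="pt").to(device)
            embedding = question_encoder(**tokens)[0][0].cpu().numpy()
            _, hits = dpr_dataset.get_nearest_examples("embeddings", embedding, k=10)
            contexts.append(" ".join(hits["text"]).strip())
        retrieved_contexts.append(contexts)

# The mapped function now only closes over a list of strings, which hashes fine.
def attach_contexts(example, idx):
    for option, context in zip(example["options"], retrieved_contexts[idx]):
        option["option_context"] = context
    return example

ds_with_context = dataset.map(attach_contexts, with_indices=True, load_from_cache_file=False)
```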
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1804","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1804\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1804\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1804\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1804","id":798483881,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY1MjkzMTc3","number":1804,"title":"Add SICK dataset","user":{"login":"calpt","id":36051308,"node_id":"MDQ6VXNlcjM2MDUxMzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36051308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/calpt","html_url":"https:\/\/github.com\/calpt","followers_url":"https:\/\/api.github.com\/users\/calpt\/followers","following_url":"https:\/\/api.github.com\/users\/calpt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/calpt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/calpt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/calpt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/calpt\/orgs","repos_url":"https:\/\/api.github.com\/users\/calpt\/repos","events_url":"https:\/\/api.github.com\/users\/calpt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/calpt\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-01T15:57:44Z","updated_at":"2021-02-05T17:46:28Z","closed_at":"2021-02-05T15:49:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1804","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1804","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1804.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1804.patch","merged_at":"2021-02-05T15:49:25Z"},"body":"Adds the SICK dataset (http:\/\/marcobaroni.org\/composes\/sick.html).\r\n\r\nCloses #1772.\r\n\r\nEdit: also closes #1632, which is the original issue requesting the dataset. 
The newer one is a duplicate.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1804\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1804\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1803","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1803\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1803\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1803\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1803","id":798243904,"node_id":"MDU6SXNzdWU3OTgyNDM5MDQ=","number":1803,"title":"Querying examples from big datasets is slower than small datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-02-01T11:08:23Z","updated_at":"2021-08-04T18:11:01Z","closed_at":"2021-08-04T18:10:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"After some experiments with bookcorpus I noticed that querying examples from big datasets is slower than small datasets.\r\nFor example\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nb1 = load_dataset(\"bookcorpus\", split=\"train[:1%]\")\r\nb50 = load_dataset(\"bookcorpus\", split=\"train[:50%]\")\r\nb100 = load_dataset(\"bookcorpus\", split=\"train[:100%]\")\r\n\r\n%timeit _ = b1[-1] \r\n# 12.2 \u00b5s \u00b1 70.4 ns per loop (mean \u00b1 std. dev. of 7 runs, 100000 loops each)\r\n\r\n%timeit _ = b50[-1] \r\n# 92.5 \u00b5s \u00b1 1.24 \u00b5s per loop (mean \u00b1 std. dev. of 7 runs, 10000 loops each)\r\n\r\n%timeit _ = b100[-1] \r\n# 177 \u00b5s \u00b1 3.13 \u00b5s per loop (mean \u00b1 std. dev. of 7 runs, 10000 loops each)\r\n\r\n```\r\n\r\nIt looks like the time to fetch the example increases with the size of the dataset.\r\n\r\nThis is maybe due to the use of the Arrow streaming format to store the data on disk. 
I guess pyarrow needs to iterate through the file as a stream to find the queried sample.\r\n\r\nMaybe switching to the Arrow IPC file format could help fixing this issue.\r\n\r\nIndeed according to the [documentation](https:\/\/arrow.apache.org\/docs\/format\/Columnar.html?highlight=arrow1#ipc-file-format), it's identical to the streaming format except that it contains the memory offsets of each sample, which could fix the issue:\r\n> We define a \u201cfile format\u201d supporting random access that is build with the stream format. The file starts and ends with a magic string ARROW1 (plus padding). What follows in the file is identical to the stream format. At the end of the file, we write a footer containing a redundant copy of the schema (which is a part of the streaming format) plus memory offsets and sizes for each of the data blocks in the file. This enables random access any record batch in the file. See File.fbs for the precise details of the file footer.\r\n\r\ncc @gaceladri since it can help speed up your training when this one is fixed.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1803\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1803\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1802","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1802\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1802\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1802\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1802","id":797924468,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY0ODE4NDIy","number":1802,"title":"add github of 
contributors","user":{"login":"vasudevgupta7","id":53136577,"node_id":"MDQ6VXNlcjUzMTM2NTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53136577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vasudevgupta7","html_url":"https:\/\/github.com\/vasudevgupta7","followers_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/followers","following_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/orgs","repos_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/repos","events_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-01T03:49:19Z","updated_at":"2021-02-03T10:09:52Z","closed_at":"2021-02-03T10:06:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1802","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1802","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1802.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1802.patch","merged_at":"2021-02-03T10:06:30Z"},"body":"This PR will add contributors GitHub id at the end of every dataset cards.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1802\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1802\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1801","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1801\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1801\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1801\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1801","id":797814275,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY0NzMwODYw","number":1801,"title":"[GEM] Updated the source link of the data to update correct tokenized 
version.","user":{"login":"mounicam","id":11708999,"node_id":"MDQ6VXNlcjExNzA4OTk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11708999?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mounicam","html_url":"https:\/\/github.com\/mounicam","followers_url":"https:\/\/api.github.com\/users\/mounicam\/followers","following_url":"https:\/\/api.github.com\/users\/mounicam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mounicam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mounicam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mounicam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mounicam\/orgs","repos_url":"https:\/\/api.github.com\/users\/mounicam\/repos","events_url":"https:\/\/api.github.com\/users\/mounicam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mounicam\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-31T21:17:19Z","updated_at":"2021-02-02T13:17:38Z","closed_at":"2021-02-02T13:17:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1801","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1801","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1801.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1801.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1801\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1801\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1800","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1800\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1800\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1800\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1800","id":797798689,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY0NzE5MjA3","number":1800,"title":"Add DuoRC 
Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-31T20:01:59Z","updated_at":"2021-02-03T05:01:45Z","closed_at":"2021-02-02T22:49:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1800","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1800","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1800.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1800.patch","merged_at":"2021-02-02T22:49:26Z"},"body":"Hi,\r\n\r\nDuoRC SelfRC is one type of the [DuoRC Dataset](https:\/\/duorc.github.io\/). DuoRC SelfRC is a crowdsourced Abstractive\/Extractive Question-Answering dataset based on Wikipedia movie plots. It contains examples that may have answers in the movie plot, synthesized answers which are not present in the movie plot, or no answers. I have also added ParaphraseRC - the other type of DuoRC dataset where questions are based on Wikipedia movie plots and answers are based on corresponding IMDb movie plots.\r\n\r\nPaper : [https:\/\/arxiv.org\/abs\/1804.07927](https:\/\/arxiv.org\/abs\/1804.07927)\r\n\r\nI want to add this to \ud83e\udd17 datasets to make it more accessible to the community. I have added all the details that I could find. Please let me know if anything else is needed from my end.\r\n\r\nThanks,\r\nGunjan\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1800\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1800\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1799","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1799\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1799\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1799\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1799","id":797789439,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY0NzEyMzUy","number":1799,"title":"Update: SWDA - Fixed code to use all metadata features. 
Added comments and cleaned c\u2026","user":{"login":"gmihaila","id":22454783,"node_id":"MDQ6VXNlcjIyNDU0Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22454783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gmihaila","html_url":"https:\/\/github.com\/gmihaila","followers_url":"https:\/\/api.github.com\/users\/gmihaila\/followers","following_url":"https:\/\/api.github.com\/users\/gmihaila\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gmihaila\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gmihaila\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gmihaila\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gmihaila\/orgs","repos_url":"https:\/\/api.github.com\/users\/gmihaila\/repos","events_url":"https:\/\/api.github.com\/users\/gmihaila\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gmihaila\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-31T19:18:55Z","updated_at":"2021-02-09T22:06:13Z","closed_at":"2021-02-09T15:49:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1799","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1799","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1799.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1799.patch","merged_at":"2021-02-09T15:49:58Z"},"body":"This is a dataset I currently use my research and I realized some features are not being returned.\r\n\r\nPrevious code was not using all available metadata and was kind of messy\r\n\r\nI fixed code to use all metadata and made some modification to be more efficient and better formatted.\r\n\r\n\r\nPlease let me know if I need to make any changes.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1799\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1799\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1798","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1798\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1798\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1798\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1798","id":797766818,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY0Njk2NjE1","number":1798,"title":"Add Arabic sarcasm 
dataset","user":{"login":"mapmeld","id":643918,"node_id":"MDQ6VXNlcjY0MzkxOA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/643918?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mapmeld","html_url":"https:\/\/github.com\/mapmeld","followers_url":"https:\/\/api.github.com\/users\/mapmeld\/followers","following_url":"https:\/\/api.github.com\/users\/mapmeld\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mapmeld\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mapmeld\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mapmeld\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mapmeld\/orgs","repos_url":"https:\/\/api.github.com\/users\/mapmeld\/repos","events_url":"https:\/\/api.github.com\/users\/mapmeld\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mapmeld\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-31T17:38:55Z","updated_at":"2021-02-10T20:39:13Z","closed_at":"2021-02-03T10:35:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1798","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1798","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1798.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1798.patch","merged_at":"2021-02-03T10:35:54Z"},"body":"This MIT license dataset: https:\/\/github.com\/iabufarha\/ArSarcasm\r\n\r\nVia https:\/\/sites.google.com\/view\/ar-sarcasm-sentiment-detection\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1798\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1798\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1797","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1797\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1797\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1797\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1797","id":797357901,"node_id":"MDU6SXNzdWU3OTczNTc5MDE=","number":1797,"title":"Connection 
error","user":{"login":"smile0925","id":46243662,"node_id":"MDQ6VXNlcjQ2MjQzNjYy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46243662?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/smile0925","html_url":"https:\/\/github.com\/smile0925","followers_url":"https:\/\/api.github.com\/users\/smile0925\/followers","following_url":"https:\/\/api.github.com\/users\/smile0925\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/smile0925\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/smile0925\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/smile0925\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/smile0925\/orgs","repos_url":"https:\/\/api.github.com\/users\/smile0925\/repos","events_url":"https:\/\/api.github.com\/users\/smile0925\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/smile0925\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-30T07:32:45Z","updated_at":"2021-08-04T18:09:37Z","closed_at":"2021-08-04T18:09:37Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am hitting to the error, help me and thanks.\r\n\r\n`train_data = datasets.load_dataset(\"xsum\", split=\"train\")`\r\n`ConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.2\/datasets\/xsum\/xsum.py`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1797\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1797\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1796","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1796\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1796\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1796\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1796","id":797329905,"node_id":"MDU6SXNzdWU3OTczMjk5MDU=","number":1796,"title":"Filter on dataset too much 
slowww","user":{"login":"ayubSubhaniya","id":20911334,"node_id":"MDQ6VXNlcjIwOTExMzM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20911334?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ayubSubhaniya","html_url":"https:\/\/github.com\/ayubSubhaniya","followers_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/followers","following_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/orgs","repos_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/repos","events_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-01-30T04:09:19Z","updated_at":"2021-02-18T17:09:24Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I have a dataset with 50M rows.\r\nFor pre-processing, I need to tokenize this and filter rows with the large sequence.\r\n\r\nMy tokenization took roughly 12mins. I used `map()` with batch size 1024 and multi-process with 96 processes.\r\n\r\nWhen I applied the `filter()` function it is taking too much time. I need to filter sequences based on a boolean column.\r\nBelow are the variants I tried.\r\n1. filter() with batch size 1024, single process (takes roughly 3 hr)\r\n2. filter() with batch size 1024, 96 processes (takes 5-6 hrs \u00af\\\\\\_(\u30c4)\\_\/\u00af)\r\n3. 
filter() with loading all data in memory, only a single boolean column (never ends).\r\n\r\nCan someone please help?\r\n\r\nBelow is a sample code for small dataset.\r\n\r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('glue', 'mrpc', split='train')\r\ndataset = dataset.map(lambda x: {'flag': random.randint(0,1)==1})\r\n\r\ndef _amplify(data):\r\n return data\r\n\r\ndataset = dataset.filter(_amplify, batch_size=1024, keep_in_memory=False, input_columns=['flag'])\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1796\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1796\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1795","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1795\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1795\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1795\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1795","id":797021730,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY0MDk5OTUz","number":1795,"title":"Custom formatting for lazy map + arrow data extraction refactor","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-01-29T16:35:53Z","updated_at":"2021-02-05T09:54:07Z","closed_at":"2021-02-05T09:54:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1795","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1795","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1795.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1795.patch","merged_at":"2021-02-05T09:54:06Z"},"body":"Hi !\r\n\r\nThis PR refactors the way data are extracted from pyarrow tables to extend it to the use of custom formatting functions.\r\n\r\nWhile the internal storage of the dataset is always the Apache Arrow format, by setting a specific format on a dataset, you can cast the output of `datasets.Dataset.__getitem__` in NumPy\/pandas\/PyTorch\/TensorFlow, on-the-fly.\r\n\r\nA specific format can be activated with `datasets.Dataset.set_format`. 
For example: `dataset.set_format(type='torch', columns=['label'])`.\r\n\r\n### What's new:\r\n\r\nYou can now also define your own formatting function that is applied on-the-fly. To do so you can pass your formatting function in the `transform` parameter of `datasets.Dataset.set_format`, and keep `type` to `None`.\r\nA formatting function is a callable that takes a batch (as a dict, formatted as python) as input and returns a batch.\r\n\r\nHere is an example to tokenize and pad tokens on-the-fly when accessing the samples:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom transformers import BertTokenizer\r\n\r\ntokenizer = BertTokenizer.from_pretrained(\"bert-base-uncased\")\r\ndef encode(batch):\r\n return tokenizer(batch[\"sentence1\"], padding=\"longest\", truncation=True, max_length=512, return_tensors=\"pt\")\r\n\r\ndataset = load_dataset(\"glue\", \"mrpc\", split=\"train\")\r\ndataset.set_format(transform=encode)\r\ndataset.format\r\n# {'type': 'custom', 'format_kwargs': {'transform': }, 'columns': ['idx', 'label', 'sentence1', 'sentence2'], 'output_all_columns': False}\r\ndataset[:2]\r\n# {'input_ids': tensor([[ 101, 2572, 3217, ... 102]]), 'token_type_ids': tensor([[0, 0, 0, ... 0]]), 'attention_mask': tensor([[1, 1, 1, ... 1]])}\r\n```\r\n\r\nLet me know what you think of this API !\r\nWe can still change it if we want to.\r\n\r\nEspecially @sgugger since this may be useful when using `datasets` to train models.\r\n\r\nEDIT: this was changed to `dataset.set_transform(encode)`\r\n\r\n-------------------\r\n\r\nNote:\r\n\r\nI had to refactor the way data are extracted and formatted from pyarrow tables and I made it more robust and flexible. In particular I modularized it to be able to unit-test it properly. This was very helpful since I detected some bugs in the previous implementation and was able to fix them.\r\n\r\nSome bugs I found and fixed:\r\n- certain slices\/ranges were not supported because negative ids were passed to pyarrow\r\n- formatting as numpy\/torch\/tensorflow a column would make it lose its precision information (for example a column as `Value(\"float32\")`) would be returned as a tensor of float64 (default behavior for numpy)\r\n- on windows integers formatted as numpy\/torch\/tensorflow were not always int64 tensors by default but were int32 \r\n\r\nThe unit tests for those are now really extensive :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1795\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":3,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1795\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1794","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1794\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1794\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1794\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1794","id":796975588,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY0MDYyMTkw","number":1794,"title":"Move silicone 
directory","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-29T15:33:15Z","updated_at":"2021-01-29T16:31:39Z","closed_at":"2021-01-29T16:31:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1794","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1794","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1794.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1794.patch","merged_at":"2021-01-29T16:31:38Z"},"body":"The dataset was added in #1761 but not in the right directory. I'm moving it to \/datasets","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1794\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1794\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1793","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1793\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1793\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1793\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1793","id":796940299,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY0MDMzMjk0","number":1793,"title":"Minor fix the docstring of 
load_metric","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-29T14:47:35Z","updated_at":"2021-01-29T16:53:32Z","closed_at":"2021-01-29T16:53:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1793","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1793","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1793.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1793.patch","merged_at":"2021-01-29T16:53:32Z"},"body":"Minor fix:\r\n- duplicated attributes\r\n- format fix","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1793\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1793\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1792","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1792\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1792\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1792\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1792","id":796934627,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY0MDI4NTk1","number":1792,"title":"Allow loading dataset 
in-memory","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-01-29T14:39:50Z","updated_at":"2021-02-12T14:13:28Z","closed_at":"2021-02-12T14:13:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1792","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1792","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1792.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1792.patch","merged_at":"2021-02-12T14:13:28Z"},"body":"Allow loading datasets either from:\r\n- memory-mapped file (current implementation)\r\n- from file descriptor, copying data to physical memory\r\n\r\nClose #708","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1792\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1792\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1791","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1791\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1791\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1791\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1791","id":796924519,"node_id":"MDExOlB1bGxSZXF1ZXN0NTY0MDE5OTk3","number":1791,"title":"Small fix with corrected logging of train 
vectors","user":{"login":"TezRomacH","id":7549587,"node_id":"MDQ6VXNlcjc1NDk1ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7549587?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TezRomacH","html_url":"https:\/\/github.com\/TezRomacH","followers_url":"https:\/\/api.github.com\/users\/TezRomacH\/followers","following_url":"https:\/\/api.github.com\/users\/TezRomacH\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TezRomacH\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TezRomacH\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TezRomacH\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TezRomacH\/orgs","repos_url":"https:\/\/api.github.com\/users\/TezRomacH\/repos","events_url":"https:\/\/api.github.com\/users\/TezRomacH\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TezRomacH\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-29T14:26:06Z","updated_at":"2021-01-29T18:51:10Z","closed_at":"2021-01-29T17:05:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1791","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1791","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1791.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1791.patch","merged_at":"2021-01-29T17:05:07Z"},"body":"Now you can set `train_size` to the whole dataset size via `train_size = -1` and login writes not `Training the index with the first -1 vectors` but (for example) `Training the index with the first 16123 vectors`. And maybe more than dataset length. 
Logging will be correct","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1791\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1791\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1790","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1790\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1790\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1790\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1790","id":796678157,"node_id":"MDU6SXNzdWU3OTY2NzgxNTc=","number":1790,"title":"ModuleNotFoundError: No module named 'apache_beam', when specific languages.","user":{"login":"miyamonz","id":6331508,"node_id":"MDQ6VXNlcjYzMzE1MDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6331508?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/miyamonz","html_url":"https:\/\/github.com\/miyamonz","followers_url":"https:\/\/api.github.com\/users\/miyamonz\/followers","following_url":"https:\/\/api.github.com\/users\/miyamonz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/miyamonz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/miyamonz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/miyamonz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/miyamonz\/orgs","repos_url":"https:\/\/api.github.com\/users\/miyamonz\/repos","events_url":"https:\/\/api.github.com\/users\/miyamonz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/miyamonz\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-01-29T08:17:24Z","updated_at":"2021-03-25T12:10:51Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```py\r\nimport datasets\r\nwiki = datasets.load_dataset('wikipedia', '20200501.ja', cache_dir='.\/datasets')\r\n```\r\nthen `ModuleNotFoundError: No module named 'apache_beam'` happend.\r\n\r\nThe error doesn't appear when it's '20200501.en'.\r\nI don't know Apache Beam, but according to #498 it isn't necessary when it's saved to local. 
is it correct?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1790\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1790\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1789","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1789\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1789\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1789\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1789","id":796229721,"node_id":"MDExOlB1bGxSZXF1ZXN0NTYzNDQyMTc2","number":1789,"title":"[BUG FIX] typo in the import path for metrics","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-28T18:01:37Z","updated_at":"2021-01-28T18:13:56Z","closed_at":"2021-01-28T18:13:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1789","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1789","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1789.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1789.patch","merged_at":"2021-01-28T18:13:55Z"},"body":"This tiny PR fixes a typo introduced in https:\/\/github.com\/huggingface\/datasets\/pull\/1726 which prevents loading new metrics","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1789\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1789\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1788","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1788\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1788\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1788\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1788","id":795544422,"node_id":"MDExOlB1bGxSZXF1ZXN0NTYyODc1NzA2","number":1788,"title":"Doc2dial rc","user":{"login":"songfeng","id":2062185,"node_id":"MDQ6VXNlcjIwNjIxODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2062185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/songfeng","html_url":"https:\/\/github.com\/songfeng","followers_url":"https:\/\/api.github.com\/users\/songfeng\/followers","following_url":"https:\/\/api.github.com\/users\/songfeng\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/songfeng\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/songfeng\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/songfeng\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/songfeng\/orgs","repos_url":"https:\/\/api.github.com\/users\/songfeng\/repos","events_url":"https:\/\/api.github.com\/users\/songfeng\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/songfeng\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-27T23:51:00Z","updated_at":"2021-01-28T18:46:13Z","closed_at":"2021-01-28T18:46:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1788","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1788","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1788.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1788.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1788\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1788\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1787","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1787\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1787\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1787\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1787","id":795485842,"node_id":"MDExOlB1bGxSZXF1ZXN0NTYyODI1NTI3","number":1787,"title":"Update the CommonGen citation 
information","user":{"login":"yuchenlin","id":10104354,"node_id":"MDQ6VXNlcjEwMTA0MzU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10104354?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yuchenlin","html_url":"https:\/\/github.com\/yuchenlin","followers_url":"https:\/\/api.github.com\/users\/yuchenlin\/followers","following_url":"https:\/\/api.github.com\/users\/yuchenlin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yuchenlin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yuchenlin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yuchenlin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yuchenlin\/orgs","repos_url":"https:\/\/api.github.com\/users\/yuchenlin\/repos","events_url":"https:\/\/api.github.com\/users\/yuchenlin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yuchenlin\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-27T22:12:47Z","updated_at":"2021-01-28T13:56:29Z","closed_at":"2021-01-28T13:56:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1787","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1787","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1787.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1787.patch","merged_at":"2021-01-28T13:56:29Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1787\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1787\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1786","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1786\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1786\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1786\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1786","id":795462816,"node_id":"MDU6SXNzdWU3OTU0NjI4MTY=","number":1786,"title":"How to use split dataset 
","user":{"login":"kkhan188","id":78090287,"node_id":"MDQ6VXNlcjc4MDkwMjg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/78090287?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kkhan188","html_url":"https:\/\/github.com\/kkhan188","followers_url":"https:\/\/api.github.com\/users\/kkhan188\/followers","following_url":"https:\/\/api.github.com\/users\/kkhan188\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kkhan188\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kkhan188\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kkhan188\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kkhan188\/orgs","repos_url":"https:\/\/api.github.com\/users\/kkhan188\/repos","events_url":"https:\/\/api.github.com\/users\/kkhan188\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kkhan188\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-27T21:37:47Z","updated_at":"2021-04-23T15:17:39Z","closed_at":"2021-04-23T15:17:39Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"![Capture1](https:\/\/user-images.githubusercontent.com\/78090287\/106057436-cb6a1f00-6111-11eb-8c9c-3658065b1fdf.PNG)\r\n\r\nHey,\r\nI want to split the lambada dataset into corpus, test, train and valid txt files (like penn treebank) but I am not able to achieve this. What I am doing is, executing the lambada.py file in my project but its not giving desired results. 
Any help will be appreciated!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1786\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1786\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1785","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1785\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1785\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1785\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1785","id":795458856,"node_id":"MDU6SXNzdWU3OTU0NTg4NTY=","number":1785,"title":"Not enough disk space (Needed: Unknown size) when caching on a cluster","user":{"login":"olinguyen","id":4341867,"node_id":"MDQ6VXNlcjQzNDE4Njc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4341867?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/olinguyen","html_url":"https:\/\/github.com\/olinguyen","followers_url":"https:\/\/api.github.com\/users\/olinguyen\/followers","following_url":"https:\/\/api.github.com\/users\/olinguyen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/olinguyen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/olinguyen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/olinguyen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/olinguyen\/orgs","repos_url":"https:\/\/api.github.com\/users\/olinguyen\/repos","events_url":"https:\/\/api.github.com\/users\/olinguyen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/olinguyen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-01-27T21:30:59Z","updated_at":"2021-01-30T01:07:56Z","closed_at":"2021-01-30T01:07:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm running some experiments where I'm caching datasets on a cluster and accessing it through multiple compute nodes. However, I get an error when loading the cached dataset from the shared disk.\r\n\r\nThe exact error thrown:\r\n\r\n```bash\r\n>>> load_dataset(dataset, cache_dir=\"\/path\/to\/cluster\/shared\/path\")\r\nOSError: Not enough disk space. Needed: Unknown size (download: Unknown size, generated: Unknown size, post-processed: Unknown size)\r\n```\r\n\r\n\r\n[`utils.has_sufficient_disk_space`](https:\/\/github.com\/huggingface\/datasets\/blob\/8a03ab7d123a76ee744304f21ce868c75f411214\/src\/datasets\/utils\/py_utils.py#L332) fails on each job because of how the cluster system is designed (`disk_usage(\".\").free` can't compute on the cluster's shared disk).\r\n\r\n\r\nThis is exactly where the error gets thrown:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/builder.py#L502\r\n\r\n```python\r\nif not utils.has_sufficient_disk_space(self.info.size_in_bytes or 0, directory=self._cache_dir_root):\r\n raise IOError(\r\n \"Not enough disk space. 
Needed: {} (download: {}, generated: {}, post-processed: {})\".format(\r\n utils.size_str(self.info.size_in_bytes or 0),\r\n utils.size_str(self.info.download_size or 0),\r\n utils.size_str(self.info.dataset_size or 0),\r\n utils.size_str(self.info.post_processing_size or 0),\r\n )\r\n )\r\n\r\n```\r\n\r\nWhat would be a good way to circumvent this? my current fix is to manually comment out that part, but that is not ideal. \r\nWould it be possible to pass a flag to skip this check on disk space?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1785\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1785\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1784","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1784\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1784\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1784\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1784","id":794659174,"node_id":"MDU6SXNzdWU3OTQ2NTkxNzQ=","number":1784,"title":"JSONDecodeError on JSON with multiple lines","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-27T00:19:22Z","updated_at":"2021-01-31T08:47:18Z","closed_at":"2021-01-31T08:47:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello :),\r\n\r\nI have been trying to load data using a JSON file. Based on the [docs](https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#json-files), the following format is supported:\r\n\r\n```json\r\n{\"key1\":11, \"key2\":12, \"key3\":13}\r\n{\"key1\":21, \"key2\":22, \"key3\":23}\r\n```\r\n But, when I try loading a dataset with the same format, I get a JSONDecodeError : `JSONDecodeError: Extra data: line 2 column 1 (char 7142)`. Now, this is expected when using `json` to load a JSON file. 
But I was wondering if there are any special arguments to pass when using `load_dataset` as the docs suggest that this format is supported.\r\n\r\nWhen I convert the JSON file to a list of dictionaries format, I get AttributeError: `AttributeError: 'list' object has no attribute 'keys'`. So, I can't convert them to list of dictionaries either.\r\n\r\nPlease let me know :)\r\n\r\nThanks,\r\nGunjan","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1784\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1784\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1783","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1783\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1783\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1783\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1783","id":794544495,"node_id":"MDU6SXNzdWU3OTQ1NDQ0OTU=","number":1783,"title":"Dataset Examples Explorer","user":{"login":"ChewKokWah","id":30875246,"node_id":"MDQ6VXNlcjMwODc1MjQ2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30875246?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ChewKokWah","html_url":"https:\/\/github.com\/ChewKokWah","followers_url":"https:\/\/api.github.com\/users\/ChewKokWah\/followers","following_url":"https:\/\/api.github.com\/users\/ChewKokWah\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ChewKokWah\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ChewKokWah\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ChewKokWah\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ChewKokWah\/orgs","repos_url":"https:\/\/api.github.com\/users\/ChewKokWah\/repos","events_url":"https:\/\/api.github.com\/users\/ChewKokWah\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ChewKokWah\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-26T20:39:02Z","updated_at":"2021-02-01T13:58:44Z","closed_at":"2021-02-01T13:58:44Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In the Older version of the Dataset, there are a useful Dataset Explorer that allow user to visualize the examples (training, test and validation) of a particular dataset, it is no longer there in current version.\r\n\r\nHope HuggingFace can re-enable the feature that at least allow viewing of the first 20 examples of a particular dataset, or alternatively can extract 20 examples for each datasets and make those part of the Dataset Card Documentation.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1783\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1783\/timeline","performed_via_github_app":null} 
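On the JSON Lines question in #1784 above: the format quoted there (one JSON object per line) is what the `json` builder of `load_dataset` expects, while plain `json.load` raises the "Extra data" error because such a file is not a single JSON document. A minimal sketch, with a hypothetical file name:

```python
# Minimal sketch for the JSON Lines format quoted in #1784; "train.jsonl" is a hypothetical file
# in which each line is one JSON object, e.g. {"key1": 11, "key2": 12, "key3": 13}.
from datasets import load_dataset

dataset = load_dataset("json", data_files={"train": "train.jsonl"})
print(dataset["train"][0])  # {'key1': 11, 'key2': 12, 'key3': 13}
```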
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1782","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1782\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1782\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1782\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1782","id":794167920,"node_id":"MDExOlB1bGxSZXF1ZXN0NTYxNzI5OTc3","number":1782,"title":"Update pyarrow import warning","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-26T11:47:11Z","updated_at":"2021-01-26T13:50:50Z","closed_at":"2021-01-26T13:50:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1782","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1782","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1782.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1782.patch","merged_at":"2021-01-26T13:50:49Z"},"body":"Update the minimum version to >=0.17.1 in the pyarrow version check and update the message.\r\n\r\nI also moved the check at the top of the __init__.py","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1782\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1782\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1781","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1781\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1781\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1781\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1781","id":793914556,"node_id":"MDU6SXNzdWU3OTM5MTQ1NTY=","number":1781,"title":"AttributeError: module 'pyarrow' has no attribute 'PyExtensionType' during import 
","user":{"login":"PalaashAgrawal","id":45964869,"node_id":"MDQ6VXNlcjQ1OTY0ODY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/45964869?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PalaashAgrawal","html_url":"https:\/\/github.com\/PalaashAgrawal","followers_url":"https:\/\/api.github.com\/users\/PalaashAgrawal\/followers","following_url":"https:\/\/api.github.com\/users\/PalaashAgrawal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PalaashAgrawal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PalaashAgrawal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PalaashAgrawal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PalaashAgrawal\/orgs","repos_url":"https:\/\/api.github.com\/users\/PalaashAgrawal\/repos","events_url":"https:\/\/api.github.com\/users\/PalaashAgrawal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PalaashAgrawal\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-01-26T04:18:35Z","updated_at":"2021-01-26T11:47:36Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm using Colab. And suddenly this morning, there is this error. Have a look below!\r\n\r\n![screenshot-colab research google com-2021 01 26-08-15-36](https:\/\/user-images.githubusercontent.com\/45964869\/105799890-fdaf3b80-5fae-11eb-8f06-11b65cdccc30.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1781\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1781\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1780","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1780\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1780\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1780\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1780","id":793882132,"node_id":"MDExOlB1bGxSZXF1ZXN0NTYxNDkxNTgy","number":1780,"title":"Update SciFact 
URL","user":{"login":"dwadden","id":3091916,"node_id":"MDQ6VXNlcjMwOTE5MTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3091916?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dwadden","html_url":"https:\/\/github.com\/dwadden","followers_url":"https:\/\/api.github.com\/users\/dwadden\/followers","following_url":"https:\/\/api.github.com\/users\/dwadden\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dwadden\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dwadden\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dwadden\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dwadden\/orgs","repos_url":"https:\/\/api.github.com\/users\/dwadden\/repos","events_url":"https:\/\/api.github.com\/users\/dwadden\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dwadden\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-01-26T02:49:06Z","updated_at":"2021-01-28T18:48:00Z","closed_at":"2021-01-28T10:19:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1780","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1780","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1780.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1780.patch","merged_at":"2021-01-28T10:19:45Z"},"body":"Hi,\r\n\r\nI'm following up this [issue](https:\/\/github.com\/huggingface\/datasets\/issues\/1717). I'm the SciFact dataset creator, and I'm trying to update the SciFact data url in your repo. Thanks again for adding the dataset!\r\n\r\nBasically, I'd just like to change the `_URL` to `\"https:\/\/scifact.s3-us-west-2.amazonaws.com\/release\/latest\/data.tar.gz\"`. I changed `scifact.py` appropriately and tried running\r\n\r\n```\r\npython datasets-cli test datasets\/scifact --save_infos --all_configs\r\n```\r\nwhich I was hoping would update the `dataset_infos.json` for SciFact. But for some reason the code still seems to be looking for the old version of the dataset. Full stack trace below. I've tried to clear all my Huggingface-related caches, and I've `git grep`'d to make sure that the old path to the dataset isn't floating around somewhere. 
So I'm not sure why this is happening?\r\n\r\nCan you help me switch the download URL?\r\n\r\n```\r\n(datasets) $ python datasets-cli test datasets\/scifact --save_infos --all_configs\r\nChecking datasets\/scifact\/scifact.py for additional imports.\r\nFound main folder for dataset datasets\/scifact\/scifact.py at \/Users\/dwadden\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/scifact\r\nFound specific version folder for dataset datasets\/scifact\/scifact.py at \/Users\/dwadden\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/scifact\/2b43b4e125ce3369da7d6353961d9d315e6593f24cc7bbe9ede5e5c911d11534\r\nFound script file from datasets\/scifact\/scifact.py to \/Users\/dwadden\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/scifact\/2b43b4e125ce3369da7d6353961d9d315e6593f24cc7bbe9ede5e5c911d11534\/scifact.py\r\nFound dataset infos file from datasets\/scifact\/dataset_infos.json to \/Users\/dwadden\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/scifact\/2b43b4e125ce3369da7d6353961d9d315e6593f24cc7bbe9ede5e5c911d11534\/dataset_infos.json\r\nFound metadata file for dataset datasets\/scifact\/scifact.py at \/Users\/dwadden\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/scifact\/2b43b4e125ce3369da7d6353961d9d315e6593f24cc7bbe9ede5e5c911d11534\/scifact.json\r\nLoading Dataset Infos from \/Users\/dwadden\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/scifact\/2b43b4e125ce3369da7d6353961d9d315e6593f24cc7bbe9ede5e5c911d11534\r\nTesting builder 'corpus' (1\/2)\r\nGenerating dataset scifact (\/Users\/dwadden\/.cache\/huggingface\/datasets\/scifact\/corpus\/1.0.0\/2b43b4e125ce3369da7d6353961d9d315e6593f24cc7bbe9ede5e5c911d11534)\r\nDownloading and preparing dataset scifact\/corpus (download: 2.72 MiB, generated: 7.63 MiB, post-processed: Unknown size, total: 10.35 MiB) to \/Users\/dwadden\/.cache\/huggingface\/datasets\/scifact\/corpus\/1.0.0\/2b43b4e125ce3369da7d6353961d9d315e6593f24cc7bbe9ede5e5c911d11534...\r\nDownloading took 0.0 min\r\nChecksum Computation took 0.0 min\r\nTraceback (most recent call last):\r\n File \"\/Users\/dwadden\/proj\/datasets\/datasets-cli\", line 36, in \r\n service.run()\r\n File \"\/Users\/dwadden\/proj\/datasets\/src\/datasets\/commands\/test.py\", line 139, in run\r\n builder.download_and_prepare(\r\n File \"\/Users\/dwadden\/proj\/datasets\/src\/datasets\/builder.py\", line 562, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/Users\/dwadden\/proj\/datasets\/src\/datasets\/builder.py\", line 622, in _download_and_prepare\r\n verify_checksums(\r\n File \"\/Users\/dwadden\/proj\/datasets\/src\/datasets\/utils\/info_utils.py\", line 32, in verify_checksums\r\n raise ExpectedMoreDownloadedFiles(str(set(expected_checksums) - set(recorded_checksums)))\r\ndatasets.utils.info_utils.ExpectedMoreDownloadedFiles: {'https:\/\/ai2-s2-scifact.s3-us-west-2.amazonaws.com\/release\/2020-05-01\/data.tar.gz'}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1780\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1780\/timeline","performed_via_github_app":null} 
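To make the `ExpectedMoreDownloadedFiles` failure in the #1780 log above easier to follow, here is an illustrative sketch (not the actual `datasets` source) of the comparison it reports: the `dataset_infos.json` that gets loaded still lists the old `ai2-s2-scifact` URL, so that URL appears among the expected checksums but not among the checksums recorded for the files the updated `scifact.py` actually downloads.

```python
# Illustrative sketch of the set difference behind ExpectedMoreDownloadedFiles in #1780.
# Not the datasets source code; checksum values are placeholders.
expected_checksums = {
    # still listed in the dataset_infos.json that gets loaded
    "https://ai2-s2-scifact.s3-us-west-2.amazonaws.com/release/2020-05-01/data.tar.gz": "<old sha256>",
}
recorded_checksums = {
    # what the updated scifact.py actually downloaded
    "https://scifact.s3-us-west-2.amazonaws.com/release/latest/data.tar.gz": "<new sha256>",
}

missing = set(expected_checksums) - set(recorded_checksums)
print("ExpectedMoreDownloadedFiles:", missing or "none")
```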
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1779","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1779\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1779\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1779\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1779","id":793539703,"node_id":"MDExOlB1bGxSZXF1ZXN0NTYxMjEwNjI5","number":1779,"title":"Ignore definition line number of functions for caching","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-25T16:42:29Z","updated_at":"2021-01-26T10:20:20Z","closed_at":"2021-01-26T10:20:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1779","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1779","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1779.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1779.patch","merged_at":"2021-01-26T10:20:19Z"},"body":"As noticed in #1718 , when a function used for processing with `map` is moved inside its python file, then the change of line number causes the caching mechanism to consider it as a different function. 
Therefore in this case, it recomputes everything.\r\n\r\nThis is because we were not ignoring the line number definition for such functions (even though we're doing it for lambda functions).\r\n\r\nFor example this code currently prints False:\r\n```python\r\nfrom datasets.fingerprint import Hasher\r\n\r\n# define once\r\ndef foo(x):\r\n return x\r\n\r\nh = Hasher.hash(foo)\r\n\r\n# define a second time elsewhere\r\ndef foo(x):\r\n return x\r\n\r\nprint(h == Hasher.hash(foo))\r\n```\r\n\r\nI changed this by ignoring the line number for all functions.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1779\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1779\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1778","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1778\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1778\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1778\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1778","id":793474507,"node_id":"MDExOlB1bGxSZXF1ZXN0NTYxMTU2Mzk1","number":1778,"title":"Narrative QA Manual","user":{"login":"rsanjaykamath","id":18527321,"node_id":"MDQ6VXNlcjE4NTI3MzIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18527321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rsanjaykamath","html_url":"https:\/\/github.com\/rsanjaykamath","followers_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/followers","following_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/orgs","repos_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/repos","events_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-01-25T15:22:31Z","updated_at":"2021-01-29T09:35:14Z","closed_at":"2021-01-29T09:34:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1778","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1778","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1778.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1778.patch","merged_at":"2021-01-29T09:34:51Z"},"body":"Submitting the manual version of Narrative QA script which requires a manual download from the original 
repository","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1778\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1778\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1777","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1777\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1777\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1777\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1777","id":793273770,"node_id":"MDU6SXNzdWU3OTMyNzM3NzA=","number":1777,"title":"GPT2 MNLI training using run_glue.py","user":{"login":"nlp-student","id":76427077,"node_id":"MDQ6VXNlcjc2NDI3MDc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/76427077?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nlp-student","html_url":"https:\/\/github.com\/nlp-student","followers_url":"https:\/\/api.github.com\/users\/nlp-student\/followers","following_url":"https:\/\/api.github.com\/users\/nlp-student\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nlp-student\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nlp-student\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nlp-student\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nlp-student\/orgs","repos_url":"https:\/\/api.github.com\/users\/nlp-student\/repos","events_url":"https:\/\/api.github.com\/users\/nlp-student\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nlp-student\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-25T10:53:52Z","updated_at":"2021-01-25T11:12:53Z","closed_at":"2021-01-25T11:12:53Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Edit: I'm closing this because I actually meant to post this in `transformers `not `datasets`\r\n\r\nRunning this on Google Colab,\r\n\r\n```\r\n!python run_glue.py \\\r\n --model_name_or_path gpt2 \\\r\n --task_name mnli \\\r\n --do_train \\\r\n --do_eval \\\r\n --max_seq_length 128 \\\r\n --per_gpu_train_batch_size 10 \\\r\n --gradient_accumulation_steps 32\\\r\n --learning_rate 2e-5 \\\r\n --num_train_epochs 3.0 \\\r\n --output_dir models\/gpt2\/mnli\/\r\n```\r\n\r\nI get the following error,\r\n\r\n```\r\n \"Asking to pad but the tokenizer does not have a padding token. \"\r\nValueError: Asking to pad but the tokenizer does not have a padding token. 
Please select a token to use as `pad_token` `(tokenizer.pad_token = tokenizer.eos_token e.g.)` or add a new pad token via `tokenizer.add_special_tokens({'pad_token': '[PAD]'})`.\r\n```\r\n\r\nDo I need to modify the trainer to work with GPT2 ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1777\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1777\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1776","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1776\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1776\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1776\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1776","id":792755249,"node_id":"MDU6SXNzdWU3OTI3NTUyNDk=","number":1776,"title":"[Question & Bug Report] Can we preprocess a dataset on the fly?","user":{"login":"shuaihuaiyi","id":14048129,"node_id":"MDQ6VXNlcjE0MDQ4MTI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14048129?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shuaihuaiyi","html_url":"https:\/\/github.com\/shuaihuaiyi","followers_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/followers","following_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/orgs","repos_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/repos","events_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-01-24T09:28:24Z","updated_at":"2021-05-20T04:15:58Z","closed_at":"2021-05-20T04:15:58Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I know we can use `Datasets.map` to preprocess a dataset, but I'm using it with very large corpus which generates huge cache file (several TB cache from a 400 GB text file). I have no disk large enough to save it. Can we preprocess a dataset on the fly without generating cache?\r\n\r\nBTW, I tried raising `writer_batch_size`. Seems that argument doesn't have any effect when it's larger than `batch_size`, because you are saving all the batch instantly after it's processed. 
Please check the following code:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/0281f9d881f3a55c89aeaa642f1ba23444b64083\/src\/datasets\/arrow_dataset.py#L1532","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1776\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1776\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1775","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1775\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1775\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1775\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1775","id":792742120,"node_id":"MDU6SXNzdWU3OTI3NDIxMjA=","number":1775,"title":"Efficient ways to iterate the dataset","user":{"login":"zhongpeixiang","id":11826803,"node_id":"MDQ6VXNlcjExODI2ODAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11826803?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zhongpeixiang","html_url":"https:\/\/github.com\/zhongpeixiang","followers_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/followers","following_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/orgs","repos_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/repos","events_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-24T07:54:31Z","updated_at":"2021-01-24T09:50:39Z","closed_at":"2021-01-24T09:50:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"For a large dataset that does not fit in memory, how can I select only a subset of features from each example?\r\n\r\nIf I iterate over the dataset and then select the subset of features one by one, the resulting memory usage will be huge. 
Any ways to solve this?\r\n\r\nThanks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1775\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1775\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1774","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1774\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1774\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1774\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1774","id":792730559,"node_id":"MDU6SXNzdWU3OTI3MzA1NTk=","number":1774,"title":"is it possible to make slice to be more compatible like python list and numpy?","user":{"login":"world2vec","id":7607120,"node_id":"MDQ6VXNlcjc2MDcxMjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7607120?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/world2vec","html_url":"https:\/\/github.com\/world2vec","followers_url":"https:\/\/api.github.com\/users\/world2vec\/followers","following_url":"https:\/\/api.github.com\/users\/world2vec\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/world2vec\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/world2vec\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/world2vec\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/world2vec\/orgs","repos_url":"https:\/\/api.github.com\/users\/world2vec\/repos","events_url":"https:\/\/api.github.com\/users\/world2vec\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/world2vec\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-24T06:15:52Z","updated_at":"2021-01-24T23:36:18Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nsee below error:\r\n```\r\nAssertionError: Requested slice [:10000000000000000] incompatible with 20 examples.\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1774\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1774\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1773","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1773\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1773\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1773\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1773","id":792708160,"node_id":"MDU6SXNzdWU3OTI3MDgxNjA=","number":1773,"title":"bug in loading datasets 
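For the feature-subset question in issue #1775 above, one low-memory route is to restrict the formatted columns and slice in batches; the Arrow table stays memory-mapped on disk, so only the selected columns of each accessed slice are materialized. A sketch with placeholder file and column names:

```python
from datasets import load_dataset

# Hypothetical CSV file; rows are memory-mapped and only loaded when accessed.
ds = load_dataset("csv", data_files="data.csv", split="train")

# Only these features are returned when rows are accessed.
ds.set_format(columns=["text", "label"])

for start in range(0, len(ds), 1000):
    batch = ds[start : start + 1000]  # dict containing only the two selected columns
    # ... process batch["text"] and batch["label"] here ...
    pass
```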
","user":{"login":"ghost","id":10137,"node_id":"MDQ6VXNlcjEwMTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghost","html_url":"https:\/\/github.com\/ghost","followers_url":"https:\/\/api.github.com\/users\/ghost\/followers","following_url":"https:\/\/api.github.com\/users\/ghost\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghost\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghost\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghost\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghost\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghost\/repos","events_url":"https:\/\/api.github.com\/users\/ghost\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghost\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-01-24T02:53:45Z","updated_at":"2021-09-06T08:54:46Z","closed_at":"2021-08-04T18:13:01Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI need to load a dataset, I use these commands:\r\n\r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('csv', data_files={'train': 'sick\/train.csv',\r\n 'test': 'sick\/test.csv',\r\n 'validation': 'sick\/validation.csv'})\r\nprint(dataset['validation'])\r\n```\r\nthe dataset in sick\/train.csv are simple csv files representing the data. I am getting this error, do you have an idea how I can solve this? thank you @lhoestq \r\n\r\n \r\n```\r\nUsing custom data configuration default\r\nDownloading and preparing dataset csv\/default-61468fc71a743ec1 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/julia\/cache_home_2\/datasets\/csv\/default-61468fc71a743ec1\/0.0.0\/2960f95a26e85d40ca41a230ac88787f715ee3003edaacb8b1f0891e9f04dda2...\r\nTraceback (most recent call last):\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/datasets-1.2.0-py3.7.egg\/datasets\/builder.py\", line 485, in incomplete_dir\r\n yield tmp_dir\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/datasets-1.2.0-py3.7.egg\/datasets\/builder.py\", line 527, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/datasets-1.2.0-py3.7.egg\/datasets\/builder.py\", line 604, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/datasets-1.2.0-py3.7.egg\/datasets\/builder.py\", line 959, in _prepare_split\r\n for key, table in utils.tqdm(generator, unit=\" tables\", leave=False, disable=not_verbose):\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/tqdm-4.49.0-py3.7.egg\/tqdm\/std.py\", line 1133, in __iter__\r\n for obj in iterable:\r\n File \"\/julia\/cache_home_2\/modules\/datasets_modules\/datasets\/csv\/2960f95a26e85d40ca41a230ac88787f715ee3003edaacb8b1f0891e9f04dda2\/csv.py\", line 129, in _generate_tables\r\n for batch_idx, df in enumerate(csv_file_reader):\r\n File 
\"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/pandas-1.2.0-py3.7-linux-x86_64.egg\/pandas\/io\/parsers.py\", line 1029, in __next__\r\n return self.get_chunk()\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/pandas-1.2.0-py3.7-linux-x86_64.egg\/pandas\/io\/parsers.py\", line 1079, in get_chunk\r\n return self.read(nrows=size)\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/pandas-1.2.0-py3.7-linux-x86_64.egg\/pandas\/io\/parsers.py\", line 1052, in read\r\n index, columns, col_dict = self._engine.read(nrows)\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/pandas-1.2.0-py3.7-linux-x86_64.egg\/pandas\/io\/parsers.py\", line 2056, in read\r\n data = self._reader.read(nrows)\r\n File \"pandas\/_libs\/parsers.pyx\", line 756, in pandas._libs.parsers.TextReader.read\r\n File \"pandas\/_libs\/parsers.pyx\", line 783, in pandas._libs.parsers.TextReader._read_low_memory\r\n File \"pandas\/_libs\/parsers.pyx\", line 827, in pandas._libs.parsers.TextReader._read_rows\r\n File \"pandas\/_libs\/parsers.pyx\", line 814, in pandas._libs.parsers.TextReader._tokenize_rows\r\n File \"pandas\/_libs\/parsers.pyx\", line 1951, in pandas._libs.parsers.raise_parser_error\r\npandas.errors.ParserError: Error tokenizing data. C error: Expected 1 fields in line 37, saw 2\r\n\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"write_sick.py\", line 19, in \r\n 'validation': 'sick\/validation.csv'})\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/datasets-1.2.0-py3.7.egg\/datasets\/load.py\", line 612, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/datasets-1.2.0-py3.7.egg\/datasets\/builder.py\", line 534, in download_and_prepare\r\n self._save_info()\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/contextlib.py\", line 130, in __exit__\r\n self.gen.throw(type, value, traceback)\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/site-packages\/datasets-1.2.0-py3.7.egg\/datasets\/builder.py\", line 491, in incomplete_dir\r\n shutil.rmtree(tmp_dir)\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/shutil.py\", line 498, in rmtree\r\n onerror(os.rmdir, path, sys.exc_info())\r\n File \"\/julia\/libs\/anaconda3\/envs\/success\/lib\/python3.7\/shutil.py\", line 496, in rmtree\r\n os.rmdir(path)\r\nOSError: [Errno 39] Directory not empty: '\/julia\/cache_home_2\/datasets\/csv\/default-61468fc71a743ec1\/0.0.0\/2960f95a26e85d40ca41a230ac88787f715ee3003edaacb8b1f0891e9f04dda2.incomplete'\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1773\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1773\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1772","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1772\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1772\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1772\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1772","id":792703797,"node_id":"MDU6SXNzdWU3OTI3MDM3OTc=","number":1772,"title":"Adding SICK dataset","user":{"login":"ghost","id":10137,"node_id":"MDQ6VXNlcjEwMTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghost","html_url":"https:\/\/github.com\/ghost","followers_url":"https:\/\/api.github.com\/users\/ghost\/followers","following_url":"https:\/\/api.github.com\/users\/ghost\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghost\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghost\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghost\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghost\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghost\/repos","events_url":"https:\/\/api.github.com\/users\/ghost\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghost\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-24T02:15:31Z","updated_at":"2021-02-05T15:49:25Z","closed_at":"2021-02-05T15:49:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nIt would be great to include SICK dataset.\r\n\r\n## Adding a Dataset\r\n- **Name:** SICK\r\n- **Description:** a well known entailment dataset \r\n- **Paper:** http:\/\/marcobaroni.org\/composes\/sick.html\r\n- **Data:** http:\/\/marcobaroni.org\/composes\/sick.html\r\n- **Motivation:** this is an important NLI benchmark\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\n\r\n\r\nthanks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1772\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1772\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1771","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1771\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1771\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1771\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1771","id":792701276,"node_id":"MDU6SXNzdWU3OTI3MDEyNzY=","number":1771,"title":"Couldn't reach 
https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.2.1\/datasets\/csv\/csv.py","user":{"login":"world2vec","id":7607120,"node_id":"MDQ6VXNlcjc2MDcxMjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7607120?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/world2vec","html_url":"https:\/\/github.com\/world2vec","followers_url":"https:\/\/api.github.com\/users\/world2vec\/followers","following_url":"https:\/\/api.github.com\/users\/world2vec\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/world2vec\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/world2vec\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/world2vec\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/world2vec\/orgs","repos_url":"https:\/\/api.github.com\/users\/world2vec\/repos","events_url":"https:\/\/api.github.com\/users\/world2vec\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/world2vec\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-01-24T01:53:52Z","updated_at":"2021-01-24T23:06:29Z","closed_at":"2021-01-24T23:06:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nWhen I load_dataset from local csv files, below error happened, looks raw.githubusercontent.com was blocked by the chinese government. But why it need to download csv.py? should it include when pip install the dataset?\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\/home\/tom\/pyenv\/pystory\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 267, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/home\/tom\/pyenv\/pystory\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 343, in cached_path\r\n max_retries=download_config.max_retries,\r\n File \"\/home\/tom\/pyenv\/pystory\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 617, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.2.1\/datasets\/csv\/csv.py\r\n\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1771\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1771\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1770","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1770\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1770\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1770\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1770","id":792698148,"node_id":"MDU6SXNzdWU3OTI2OTgxNDg=","number":1770,"title":"how can I combine 2 dataset with different\/same 
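For the connectivity problem in issue #1771 above: on 1.2.x the generic csv loading script is fetched from GitHub at call time (later releases bundle these builders with the library). A workaround sketch, assuming a local copy of the script can be obtained once from a mirror or another machine; all paths are placeholders:

```python
from datasets import load_dataset

# Point load_dataset at a local copy of datasets/csv/csv.py instead of the
# remote script, so no request to raw.githubusercontent.com is needed.
dataset = load_dataset(
    "/path/to/local/csv.py",            # hypothetical local copy of the loading script
    data_files={"train": "train.csv"},  # hypothetical data file
    split="train",
)
```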
features?","user":{"login":"world2vec","id":7607120,"node_id":"MDQ6VXNlcjc2MDcxMjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7607120?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/world2vec","html_url":"https:\/\/github.com\/world2vec","followers_url":"https:\/\/api.github.com\/users\/world2vec\/followers","following_url":"https:\/\/api.github.com\/users\/world2vec\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/world2vec\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/world2vec\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/world2vec\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/world2vec\/orgs","repos_url":"https:\/\/api.github.com\/users\/world2vec\/repos","events_url":"https:\/\/api.github.com\/users\/world2vec\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/world2vec\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-24T01:26:06Z","updated_at":"2021-01-24T23:43:54Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"to combine 2 dataset by one-one map like ds = zip(ds1, ds2):\r\nds1: {'text'}, ds2: {'text'}, combine ds:{'src', 'tgt'} \r\nor different feature:\r\nds1: {'src'}, ds2: {'tgt'}, combine ds:{'src', 'tgt'}","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1770\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1770\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1769","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1769\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1769\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1769\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1769","id":792523284,"node_id":"MDU6SXNzdWU3OTI1MjMyODQ=","number":1769,"title":"_pickle.PicklingError: Can't pickle typing.Union[str, NoneType]: it's not the same object as typing.Union when calling datasets.map with 
num_proc=2","user":{"login":"shuaihuaiyi","id":14048129,"node_id":"MDQ6VXNlcjE0MDQ4MTI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14048129?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shuaihuaiyi","html_url":"https:\/\/github.com\/shuaihuaiyi","followers_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/followers","following_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/orgs","repos_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/repos","events_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shuaihuaiyi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-01-23T10:13:00Z","updated_at":"2021-01-25T10:23:57Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It may be a bug of multiprocessing with Datasets, when I disable the multiprocessing by set num_proc to None, everything works fine.\r\n\r\nThe script I use is https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/language-modeling\/run_mlm_wwm.py\r\n\r\nScript args:\r\n\r\n```\r\n--model_name_or_path\r\n..\/..\/..\/model\/chinese-roberta-wwm-ext\r\n--train_file\r\n\/nfs\/volume-377-2\/bert\/data\/test\/train.txt\r\n--output_dir\r\ntest\r\n--do_train\r\n--per_device_train_batch_size\r\n2\r\n--gradient_accumulation_steps\r\n2\r\n--learning_rate\r\n1e-4\r\n--max_steps\r\n1000\r\n--warmup_steps\r\n10\r\n--save_steps\r\n1000\r\n--save_total_limit\r\n1\r\n--seed\r\n23333\r\n--max_seq_length\r\n512\r\n--preprocessing_num_workers\r\n2\r\n--cache_dir\r\n\/nfs\/volume-377-2\/bert\/data\/test\/cache\r\n```\r\n\r\nWhere the `\/nfs\/volume-377-2\/bert\/data\/test\/train.txt` is just a toy example with 10000 lines of random string, you should be able to reproduce this error esaily.\r\n\r\nFull Traceback:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\/nfs\/volume-377-2\/bert\/transformers\/examples\/language-modeling\/run_mlm_wwm.py\", line 398, in \r\n main()\r\n File \"\/nfs\/volume-377-2\/bert\/transformers\/examples\/language-modeling\/run_mlm_wwm.py\", line 325, in main\r\n load_from_cache_file=not data_args.overwrite_cache,\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/datasets\/dataset_dict.py\", line 303, in map\r\n for k, dataset in self.items()\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/datasets\/dataset_dict.py\", line 303, in \r\n for k, dataset in self.items()\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py\", line 1318, in map\r\n transformed_shards = [r.get() for r in results]\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py\", line 1318, in \r\n transformed_shards = [r.get() for r in results]\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/multiprocess\/pool.py\", line 644, in get\r\n raise self._value\r\n File 
\"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/multiprocess\/pool.py\", line 424, in _handle_tasks\r\n put(task)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/multiprocess\/connection.py\", line 209, in send\r\n self._send_bytes(_ForkingPickler.dumps(obj))\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/multiprocess\/reduction.py\", line 54, in dumps\r\n cls(buf, protocol, *args, **kwds).dump(obj)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 446, in dump\r\n StockPickler.dump(self, obj)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 409, in dump\r\n self.save(obj)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 751, in save_tuple\r\n save(element)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 933, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 821, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 847, in _batch_setitems\r\n save(v)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 1438, in save_function\r\n obj.__dict__, fkwdefaults), obj=obj)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 610, in save_reduce\r\n save(args)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 751, in save_tuple\r\n save(element)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 736, in save_tuple\r\n save(element)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 1170, in save_cell\r\n pickler.save_reduce(_create_cell, (f,), obj=obj)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 610, in save_reduce\r\n save(args)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 736, in save_tuple\r\n save(element)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 521, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", 
line 605, in save_reduce\r\n save(cls)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 1365, in save_type\r\n obj.__bases__, _dict), obj=obj)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 610, in save_reduce\r\n save(args)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 751, in save_tuple\r\n save(element)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 933, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 821, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 847, in _batch_setitems\r\n save(v)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 933, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 821, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 847, in _batch_setitems\r\n save(v)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 507, in save\r\n self.save_global(obj, rv)\r\n File \"\/home\/luban\/miniconda3\/envs\/py36\/lib\/python3.6\/pickle.py\", line 927, in save_global\r\n (obj, module_name, name))\r\n_pickle.PicklingError: Can't pickle typing.Union[str, NoneType]: it's not the same object as typing.Union\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1769\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1769\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1768","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1768\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1768\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1768\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1768","id":792150745,"node_id":"MDExOlB1bGxSZXF1ZXN0NTYwMDgyNzIx","number":1768,"title":"Mention kwargs in the Dataset Formatting 
docs","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-22T16:43:20Z","updated_at":"2021-01-31T12:33:10Z","closed_at":"2021-01-25T09:14:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1768","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1768","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1768.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1768.patch","merged_at":"2021-01-25T09:14:59Z"},"body":"Hi,\r\n\r\nThis was discussed in Issue #1762 where the docs didn't mention that keyword arguments to `datasets.Dataset.set_format()` are allowed. 
\r\nTo prevent people from having to check the code\/method docs, I just added a couple of lines in the docs.\r\n\r\nPlease let me know your thoughts on this.\r\n\r\nThanks,\r\nGunjan\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1768\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1768\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1767","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1767\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1767\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1767\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1767","id":792068497,"node_id":"MDExOlB1bGxSZXF1ZXN0NTYwMDE2MzE2","number":1767,"title":"Add Librispeech ASR","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-22T14:54:37Z","updated_at":"2021-01-25T20:38:07Z","closed_at":"2021-01-25T20:37:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1767","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1767","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1767.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1767.patch","merged_at":"2021-01-25T20:37:42Z"},"body":"This PR adds the librispeech asr dataset: https:\/\/www.tensorflow.org\/datasets\/catalog\/librispeech\r\n\r\nThere are 2 configs: \"clean\" and \"other\" whereas there are two \"train\" datasets for \"clean\", hence the name \"train.100\" and \"train.360\".\r\n\r\nAs suggested by @lhoestq, due to the enormous size of the dataset in `.arrow` format, the speech files are not directly prepared to a float32-array, but instead just the path to the array file is 
stored.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1767\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1767\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1766","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1766\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1766\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1766\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1766","id":792044105,"node_id":"MDU6SXNzdWU3OTIwNDQxMDU=","number":1766,"title":"Issues when run two programs compute the same metrics","user":{"login":"lamthuy","id":8089862,"node_id":"MDQ6VXNlcjgwODk4NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8089862?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lamthuy","html_url":"https:\/\/github.com\/lamthuy","followers_url":"https:\/\/api.github.com\/users\/lamthuy\/followers","following_url":"https:\/\/api.github.com\/users\/lamthuy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lamthuy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lamthuy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lamthuy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lamthuy\/orgs","repos_url":"https:\/\/api.github.com\/users\/lamthuy\/repos","events_url":"https:\/\/api.github.com\/users\/lamthuy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lamthuy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-22T14:22:55Z","updated_at":"2021-02-02T10:38:06Z","closed_at":"2021-02-02T10:38:06Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I got the following error when running two different programs that both compute sacreblue metrics. 
It seems that both read\/and\/write to the same location (.cache\/huggingface\/metrics\/sacrebleu\/default\/default_experiment-1-0.arrow) where it caches the batches:\r\n\r\n```\r\nFile \"train_matching_min.py\", line 160, in ch_9_label\r\n avg_loss = valid(epoch, args.batch, args.validation, args.with_label)\r\n File \"train_matching_min.py\", line 93, in valid\r\n bleu += eval.compute()\r\n File \"\/u\/tlhoang\/projects\/seal\/match\/models\/eval.py\", line 23, in compute\r\n return self.metric.compute()['score']\r\n File \"\/dccstor\/know\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/metric.py\", line 387, in compute\r\n self._finalize()\r\n File \"\/dccstor\/know\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/metric.py\", line 355, in _finalize\r\n self.data = Dataset(**reader.read_files([{\"filename\": f} for f in file_paths]))\r\n File \"\/dccstor\/know\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/arrow_reader.py\", line 231, in read_files\r\n pa_table = self._read_files(files)\r\n File \"\/dccstor\/know\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/arrow_reader.py\", line 170, in _read_files\r\n pa_table: pa.Table = self._get_dataset_from_filename(f_dict)\r\n File \"\/dccstor\/know\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/arrow_reader.py\", line 299, in _get_dataset_from_filename\r\n pa_table = f.read_all()\r\n File \"pyarrow\/ipc.pxi\", line 481, in pyarrow.lib.RecordBatchReader.read_all\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Expected to read 1819307375 metadata bytes, but only read 454396\r\n``` ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1766\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1766\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1765","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1765\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1765\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1765\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1765","id":791553065,"node_id":"MDU6SXNzdWU3OTE1NTMwNjU=","number":1765,"title":"Error iterating over Dataset with 
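The crash in issue #1766 above happens because both runs share the default metric cache file. A sketch of the usual fix: give each concurrent run its own `experiment_id` so they write separate Arrow cache files ("run_a"/"run_b" are arbitrary labels):

```python
from datasets import load_metric

# Each process gets its own cache file instead of sharing
# default_experiment-1-0.arrow.
metric_a = load_metric("sacrebleu", experiment_id="run_a")
metric_b = load_metric("sacrebleu", experiment_id="run_b")
```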
DataLoader","user":{"login":"EvanZ","id":1295082,"node_id":"MDQ6VXNlcjEyOTUwODI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1295082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/EvanZ","html_url":"https:\/\/github.com\/EvanZ","followers_url":"https:\/\/api.github.com\/users\/EvanZ\/followers","following_url":"https:\/\/api.github.com\/users\/EvanZ\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/EvanZ\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/EvanZ\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/EvanZ\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/EvanZ\/orgs","repos_url":"https:\/\/api.github.com\/users\/EvanZ\/repos","events_url":"https:\/\/api.github.com\/users\/EvanZ\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/EvanZ\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-01-21T22:56:45Z","updated_at":"2021-12-07T12:22:33Z","closed_at":"2021-01-23T03:44:14Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I have a Dataset that I've mapped a tokenizer over:\r\n\r\n```\r\nencoded_dataset.set_format(type='torch',columns=['attention_mask','input_ids','token_type_ids'])\r\nencoded_dataset[:1]\r\n```\r\n```\r\n{'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]),\r\n 'input_ids': tensor([[ 101, 178, 1198, 1400, 1714, 22233, 21365, 4515, 8618, 1113,\r\n 102]]),\r\n 'token_type_ids': tensor([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])}\r\n```\r\n\r\nWhen I try to iterate as in the docs, I get errors:\r\n\r\n```\r\ndataloader = torch.utils.data.DataLoader(encoded_dataset, batch_sampler=32)\r\nnext(iter(dataloader))\r\n```\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in ()\r\n 1 dataloader = torch.utils.data.DataLoader(encoded_dataset, batch_sampler=32)\r\n----> 2 next(iter(dataloader))\r\n\r\n3 frames\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/torch\/utils\/data\/dataloader.py in __init__(self, loader)\r\n 411 self._timeout = loader.timeout\r\n 412 self._collate_fn = loader.collate_fn\r\n--> 413 self._sampler_iter = iter(self._index_sampler)\r\n 414 self._base_seed = torch.empty((), dtype=torch.int64).random_(generator=loader.generator).item()\r\n 415 self._persistent_workers = loader.persistent_workers\r\n\r\nTypeError: 'int' object is not iterable\r\n\r\n\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1765\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1765\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1764","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1764\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1764\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1764\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1764","id":791486860,"node_id":"MDU6SXNzdWU3OTE0ODY4NjA=","number":1764,"title":"Connection Issues","user":{"login":"SaeedNajafi","id":12455298,"node_id":"MDQ6VXNlcjEyNDU1Mjk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12455298?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SaeedNajafi","html_url":"https:\/\/github.com\/SaeedNajafi","followers_url":"https:\/\/api.github.com\/users\/SaeedNajafi\/followers","following_url":"https:\/\/api.github.com\/users\/SaeedNajafi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SaeedNajafi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SaeedNajafi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SaeedNajafi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SaeedNajafi\/orgs","repos_url":"https:\/\/api.github.com\/users\/SaeedNajafi\/repos","events_url":"https:\/\/api.github.com\/users\/SaeedNajafi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SaeedNajafi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-21T20:56:09Z","updated_at":"2021-01-21T21:00:19Z","closed_at":"2021-01-21T21:00:02Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Today, I am getting connection issues while loading a dataset and the metric.\r\n```\r\nTraceback (most recent call last):\r\n File \"src\/train.py\", line 180, in \r\n train_dataset, dev_dataset, test_dataset = create_race_dataset()\r\n File \"src\/train.py\", line 130, in create_race_dataset\r\n train_dataset = load_dataset(\"race\", \"all\", split=\"train\")\r\n File \"\/Users\/saeed\/Desktop\/codes\/repos\/dreamscape-qa\/env\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 591, in load_dataset\r\n path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n File \"\/Users\/saeed\/Desktop\/codes\/repos\/dreamscape-qa\/env\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 267, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/Users\/saeed\/Desktop\/codes\/repos\/dreamscape-qa\/env\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 343, in cached_path\r\n max_retries=download_config.max_retries,\r\n File \"\/Users\/saeed\/Desktop\/codes\/repos\/dreamscape-qa\/env\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 617, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.2.1\/datasets\/race\/race.py\r\n```\r\n\r\nOr\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"src\/train.py\", line 105, in \r\n rouge = datasets.load_metric(\"rouge\")\r\n File 
\"\/Users\/saeed\/Desktop\/codes\/repos\/dreamscape-qa\/env\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 500, in load_metric\r\n dataset=False,\r\n File \"\/Users\/saeed\/Desktop\/codes\/repos\/dreamscape-qa\/env\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 267, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/Users\/saeed\/Desktop\/codes\/repos\/dreamscape-qa\/env\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 343, in cached_path\r\n max_retries=download_config.max_retries,\r\n File \"\/Users\/saeed\/Desktop\/codes\/repos\/dreamscape-qa\/env\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 617, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.2.1\/metrics\/rouge\/rouge.py\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1764\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1764\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1763","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1763\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1763\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1763\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1763","id":791389763,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU5NDU3MTY1","number":1763,"title":"PAWS-X: Fix csv Dictreader splitting data on 
quotes","user":{"login":"gowtham1997","id":9641196,"node_id":"MDQ6VXNlcjk2NDExOTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9641196?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gowtham1997","html_url":"https:\/\/github.com\/gowtham1997","followers_url":"https:\/\/api.github.com\/users\/gowtham1997\/followers","following_url":"https:\/\/api.github.com\/users\/gowtham1997\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gowtham1997\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gowtham1997\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gowtham1997\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gowtham1997\/orgs","repos_url":"https:\/\/api.github.com\/users\/gowtham1997\/repos","events_url":"https:\/\/api.github.com\/users\/gowtham1997\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gowtham1997\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-21T18:21:01Z","updated_at":"2021-01-22T10:14:33Z","closed_at":"2021-01-22T10:13:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1763","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1763","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1763.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1763.patch","merged_at":"2021-01-22T10:13:45Z"},"body":"\r\n```python\r\nfrom datasets import load_dataset\r\n# load english paws-x dataset \r\ndatasets = load_dataset('paws-x', 'en')\r\nprint(len(datasets['train'])) # outputs 49202 but official dataset has 49401 pairs\r\nprint(datasets['train'].unique('label')) # outputs [1, 0, -1] but labels are binary [0,1]\r\n```\r\n\r\nchanged `data = csv.DictReader(f, delimiter=\"\\t\")` to `data = csv.DictReader(f, delimiter=\"\\t\", quoting=csv.QUOTE_NONE)` in the dataloader to make csv module not split by quotes.\r\n\r\nThe results are as expected for all languages after the change.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1763\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1763\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1762","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1762\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1762\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1762\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1762","id":791226007,"node_id":"MDU6SXNzdWU3OTEyMjYwMDc=","number":1762,"title":"Unable to format dataset to CUDA 
Tensors","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-01-21T15:31:23Z","updated_at":"2021-02-02T07:13:22Z","closed_at":"2021-02-02T07:13:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI came across this [link](https:\/\/huggingface.co\/docs\/datasets\/torch_tensorflow.html) where the docs show show to convert a dataset to a particular format. I see that there is an option to convert it to tensors, but I don't see any option to convert it to CUDA tensors.\r\n\r\nI tried this, but Dataset doesn't support assignment:\r\n```\r\n columns=['input_ids', 'token_type_ids', 'attention_mask', 'start_positions','end_positions']\r\n\r\n samples.set_format(type='torch', columns = columns)\r\n for column in columns:\r\n samples[column].to(torch.device(self.config.device))\r\n```\r\nThere should be an option to do so, or if there is already a way to do this, please let me know.\r\n\r\nThanks,\r\nGunjan","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1762\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1762\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1761","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1761\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1761\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1761\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1761","id":791150858,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU5MjUyMzEw","number":1761,"title":"Add SILICONE 
benchmark","user":{"login":"eusip","id":1551356,"node_id":"MDQ6VXNlcjE1NTEzNTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1551356?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eusip","html_url":"https:\/\/github.com\/eusip","followers_url":"https:\/\/api.github.com\/users\/eusip\/followers","following_url":"https:\/\/api.github.com\/users\/eusip\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eusip\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eusip\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eusip\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eusip\/orgs","repos_url":"https:\/\/api.github.com\/users\/eusip\/repos","events_url":"https:\/\/api.github.com\/users\/eusip\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eusip\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-01-21T14:29:12Z","updated_at":"2021-02-04T14:32:48Z","closed_at":"2021-01-26T13:50:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1761","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1761","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1761.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1761.patch","merged_at":"2021-01-26T13:50:31Z"},"body":"My collaborators and I within the Affective Computing team at Telecom Paris would like to re-submit our spoken dialogue dataset for publication.\r\n\r\nThis is a new pull request relative to the [previously closed request](https:\/\/github.com\/huggingface\/datasets\/pull\/1712) which was reviewed by @lhoestq.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1761\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1761\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1760","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1760\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1760\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1760\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1760","id":791110857,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU5MjE3MjY0","number":1760,"title":"More 
tags","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-21T13:50:10Z","updated_at":"2021-01-22T09:40:01Z","closed_at":"2021-01-22T09:40:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1760","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1760","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1760.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1760.patch","merged_at":"2021-01-22T09:40:00Z"},"body":"Since the hub v2 is going to be released soon I figured it would be great to add the missing tags at least for some of the datasets of reference listed [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md#write-the-loadingprocessing-code)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1760\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1760\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1759","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1759\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1759\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1759\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1759","id":790992226,"node_id":"MDU6SXNzdWU3OTA5OTIyMjY=","number":1759,"title":"wikipedia dataset 
incomplete","user":{"login":"ChrisDelClea","id":19912393,"node_id":"MDQ6VXNlcjE5OTEyMzkz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19912393?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ChrisDelClea","html_url":"https:\/\/github.com\/ChrisDelClea","followers_url":"https:\/\/api.github.com\/users\/ChrisDelClea\/followers","following_url":"https:\/\/api.github.com\/users\/ChrisDelClea\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ChrisDelClea\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ChrisDelClea\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ChrisDelClea\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ChrisDelClea\/orgs","repos_url":"https:\/\/api.github.com\/users\/ChrisDelClea\/repos","events_url":"https:\/\/api.github.com\/users\/ChrisDelClea\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ChrisDelClea\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-01-21T11:47:15Z","updated_at":"2021-01-21T17:22:11Z","closed_at":"2021-01-21T17:21:06Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hey guys,\r\n\r\nI am using the https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/wikipedia dataset.\r\nUnfortunately, I found out that there is an incompleteness for the German dataset.\r\n For reasons unknown to me, the number of inhabitants has been removed from many pages:\r\nThorey-sur-Ouche has 128 inhabitants according to the webpage (https:\/\/de.wikipedia.org\/wiki\/Thorey-sur-Ouche).\r\nThe pickle file however shows: franz\u00f6sische Gemeinde mit Einwohnern (Stand).\r\n Is it possible to fix this?\r\n\r\nBest regards \r\nChris\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1759\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1759\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1758","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1758\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1758\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1758\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1758","id":790626116,"node_id":"MDU6SXNzdWU3OTA2MjYxMTY=","number":1758,"title":"dataset.search() (elastic) cannot reliably retrieve search 
results","user":{"login":"afogarty85","id":49048309,"node_id":"MDQ6VXNlcjQ5MDQ4MzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49048309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/afogarty85","html_url":"https:\/\/github.com\/afogarty85","followers_url":"https:\/\/api.github.com\/users\/afogarty85\/followers","following_url":"https:\/\/api.github.com\/users\/afogarty85\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/afogarty85\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/afogarty85\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/afogarty85\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/afogarty85\/orgs","repos_url":"https:\/\/api.github.com\/users\/afogarty85\/repos","events_url":"https:\/\/api.github.com\/users\/afogarty85\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/afogarty85\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-21T02:26:37Z","updated_at":"2021-01-22T00:25:50Z","closed_at":"2021-01-22T00:25:50Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to use elastic search to retrieve the indices of items in the dataset in their precise order, given shuffled training indices.\r\n\r\nThe problem I have is that I cannot retrieve reliable results with my data on my first search. I have to run the search **twice** to get the right answer.\r\n\r\nI am indexing data that looks like the following from the HF SQuAD 2.0 data set:\r\n\r\n```\r\n['57318658e6313a140071d02b',\r\n '56f7165e3d8e2e1400e3733a',\r\n '570e2f6e0b85d914000d7d21',\r\n '5727e58aff5b5019007d97d0',\r\n '5a3b5a503ff257001ab8441f',\r\n '57262fab271a42140099d725']\r\n```\r\n\r\n\r\n\r\nTo reproduce the issue, try:\r\n\r\n```\r\nfrom datasets import load_dataset, load_metric\r\nfrom transformers import BertTokenizerFast, BertForQuestionAnswering\r\nfrom elasticsearch import Elasticsearch\r\nimport numpy as np\r\nimport collections\r\nfrom tqdm.auto import tqdm\r\nimport torch\r\n\r\n# from https:\/\/colab.research.google.com\/github\/huggingface\/notebooks\/blob\/master\/examples\/question_answering.ipynb#scrollTo=941LPhDWeYv-\r\ntokenizer = BertTokenizerFast.from_pretrained('bert-base-uncased')\r\nmax_length = 384 # The maximum length of a feature (question and context)\r\ndoc_stride = 128 # The authorized overlap between two part of the context when splitting it is needed.\r\npad_on_right = tokenizer.padding_side == \"right\"\r\nsquad_v2 = True\r\n\r\n# from https:\/\/colab.research.google.com\/github\/huggingface\/notebooks\/blob\/master\/examples\/question_answering.ipynb#scrollTo=941LPhDWeYv-\r\ndef prepare_validation_features(examples):\r\n # Tokenize our examples with truncation and maybe padding, but keep the overflows using a stride. 
This results\r\n # in one example possible giving several features when a context is long, each of those features having a\r\n # context that overlaps a bit the context of the previous feature.\r\n tokenized_examples = tokenizer(\r\n examples[\"question\" if pad_on_right else \"context\"],\r\n examples[\"context\" if pad_on_right else \"question\"],\r\n truncation=\"only_second\" if pad_on_right else \"only_first\",\r\n max_length=max_length,\r\n stride=doc_stride,\r\n return_overflowing_tokens=True,\r\n return_offsets_mapping=True,\r\n padding=\"max_length\",\r\n )\r\n\r\n # Since one example might give us several features if it has a long context, we need a map from a feature to\r\n # its corresponding example. This key gives us just that.\r\n sample_mapping = tokenized_examples.pop(\"overflow_to_sample_mapping\")\r\n\r\n # We keep the example_id that gave us this feature and we will store the offset mappings.\r\n tokenized_examples[\"example_id\"] = []\r\n\r\n for i in range(len(tokenized_examples[\"input_ids\"])):\r\n # Grab the sequence corresponding to that example (to know what is the context and what is the question).\r\n sequence_ids = tokenized_examples.sequence_ids(i)\r\n context_index = 1 if pad_on_right else 0\r\n\r\n # One example can give several spans, this is the index of the example containing this span of text.\r\n sample_index = sample_mapping[i]\r\n tokenized_examples[\"example_id\"].append(examples[\"id\"][sample_index])\r\n\r\n # Set to None the offset_mapping that are not part of the context so it's easy to determine if a token\r\n # position is part of the context or not.\r\n tokenized_examples[\"offset_mapping\"][i] = [\r\n (list(o) if sequence_ids[k] == context_index else None)\r\n for k, o in enumerate(tokenized_examples[\"offset_mapping\"][i])\r\n ]\r\n\r\n return tokenized_examples\r\n\r\n\r\n\r\n\r\n\r\n# build base examples, features set of training data\r\nshuffled_idx = pd.read_csv('https:\/\/raw.githubusercontent.com\/afogarty85\/temp\/main\/idx.csv')['idx'].to_list()\r\nexamples = load_dataset(\"squad_v2\").shuffle(seed=1)['train']\r\nfeatures = load_dataset(\"squad_v2\").shuffle(seed=1)['train'].map(\r\n prepare_validation_features,\r\n batched=True,\r\n remove_columns=['answers', 'context', 'id', 'question', 'title'])\r\n# reorder features by the training process\r\nfeatures = features.select(indices=shuffled_idx)\r\n# get the example ids to match with the \"example\" data; get unique entries\r\nid_list = list(dict.fromkeys(features['example_id']))\r\n# now search for their index positions in the examples data set; load elastic search\r\nes = Elasticsearch([{'host': 'localhost'}]).ping()\r\n# add an index to the id column for the examples\r\nexamples.add_elasticsearch_index(column='id')\r\n# retrieve the example index\r\nexample_idx_k1 = [examples.search(index_name='id', query=i, k=1).indices for i in id_list]\r\nexample_idx_k1 = [item for sublist in example_idx_k1 for item in sublist]\r\n\r\nexample_idx_k2 = [examples.search(index_name='id', query=i, k=3).indices for i in id_list]\r\nexample_idx_k2 = [item for sublist in example_idx_k2 for item in sublist]\r\n\r\nlen(example_idx_k1) # should be 130319\r\nlen(example_idx_k2) # should be 130319\r\n\r\n#trial 1 lengths:\r\n# k=1: 130314\r\n# k=3: 130319\r\n\r\n# trial 2:\r\n# just run k=3 first: 130310\r\n# try k=1 after k=3: 
130319\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1758\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1758\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1757","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1757\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1757\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1757\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1757","id":790466509,"node_id":"MDU6SXNzdWU3OTA0NjY1MDk=","number":1757,"title":"FewRel","user":{"login":"dspoka","id":6183050,"node_id":"MDQ6VXNlcjYxODMwNTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6183050?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dspoka","html_url":"https:\/\/github.com\/dspoka","followers_url":"https:\/\/api.github.com\/users\/dspoka\/followers","following_url":"https:\/\/api.github.com\/users\/dspoka\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dspoka\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dspoka\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dspoka\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dspoka\/orgs","repos_url":"https:\/\/api.github.com\/users\/dspoka\/repos","events_url":"https:\/\/api.github.com\/users\/dspoka\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dspoka\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-01-20T23:56:03Z","updated_at":"2021-03-09T02:52:05Z","closed_at":"2021-03-08T14:34:52Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** FewRel\r\n- **Description:** Large-Scale Supervised Few-Shot Relation Classification Dataset\r\n- **Paper:** @inproceedings{han2018fewrel,\r\n title={FewRel:A Large-Scale Supervised Few-Shot Relation Classification Dataset with State-of-the-Art Evaluation},\r\n author={Han, Xu and Zhu, Hao and Yu, Pengfei and Wang, Ziyun and Yao, Yuan and Liu, Zhiyuan and Sun, Maosong},\r\n booktitle={EMNLP},\r\n year={2018}}\r\n- **Data:** https:\/\/github.com\/ProKil\/FewRel\r\n- **Motivation:** relationship extraction dataset that's been used by some state of the art systems that should be incorporated.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1757\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1757\/timeline","performed_via_github_app":null} 
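A plausible explanation for the flaky first search reported in issue #1758 above is Elasticsearch's near-real-time indexing: documents indexed by `add_elasticsearch_index` only become searchable after the index refreshes, so an immediate query can miss a few entries while a second query, run after the refresh interval, sees them all. The sketch below is one hedged way to rule this out by forcing a refresh before querying; the `es_client`/`es_index_name` arguments and the refresh call are assumptions based on the standard `datasets` and `elasticsearch` client APIs rather than anything stated in the issue. (Note also that `Elasticsearch([...]).ping()` returns a boolean, so the `es` variable in the snippet above is not a client object.)

```python
from datasets import load_dataset
from elasticsearch import Elasticsearch

# Illustrative sketch (not the issue author's code): force an index refresh so
# every newly indexed document is visible to the very first search.
es_client = Elasticsearch([{"host": "localhost"}])

examples = load_dataset("squad_v2", split="train")

# Assumption: add_elasticsearch_index accepts an explicit client and index name.
examples.add_elasticsearch_index(
    column="id", es_client=es_client, es_index_name="squad_id_index"
)

# Standard Elasticsearch call: make any pending documents searchable immediately.
es_client.indices.refresh(index="squad_id_index")

# A k=1 lookup per unique id should now return a hit on the first pass.
hits = examples.search(index_name="id", query="57318658e6313a140071d02b", k=1)
print(hits.indices)
```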
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1756","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1756\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1756\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1756\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1756","id":790380028,"node_id":"MDU6SXNzdWU3OTAzODAwMjg=","number":1756,"title":"Ccaligned multilingual translation dataset","user":{"login":"flozi00","id":47894090,"node_id":"MDQ6VXNlcjQ3ODk0MDkw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47894090?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/flozi00","html_url":"https:\/\/github.com\/flozi00","followers_url":"https:\/\/api.github.com\/users\/flozi00\/followers","following_url":"https:\/\/api.github.com\/users\/flozi00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/flozi00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/flozi00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/flozi00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/flozi00\/orgs","repos_url":"https:\/\/api.github.com\/users\/flozi00\/repos","events_url":"https:\/\/api.github.com\/users\/flozi00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/flozi00\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-20T22:18:44Z","updated_at":"2021-03-01T10:36:21Z","closed_at":"2021-03-01T10:36:21Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *name of the dataset*\r\n- **Description:** *short description of the dataset (or link to social media or blog post)*\r\n- CCAligned consists of parallel or comparable web-document pairs in 137 languages aligned with English. These web-document pairs were constructed by performing language identification on raw web-documents, and ensuring corresponding language codes were corresponding in the URLs of web documents. This pattern matching approach yielded more than 100 million aligned documents paired with English. Recognizing that each English document was often aligned to mulitple documents in different target language, we can join on English documents to obtain aligned documents that directly pair two non-English documents (e.g., Arabic-French).\r\n- **Paper:** *link to the dataset paper if available*\r\n- https:\/\/www.aclweb.org\/anthology\/2020.emnlp-main.480.pdf\r\n- **Data:** *link to the Github repository or current dataset location*\r\n- http:\/\/www.statmt.org\/cc-aligned\/\r\n- **Motivation:** *what are some good reasons to have this dataset*\r\n- The authors says it's an high quality dataset.\r\n- it's pretty large and includes many language pairs. 
It could be interesting training mt5 on this task.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1756\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1756\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1755","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1755\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1755\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1755\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1755","id":790324734,"node_id":"MDU6SXNzdWU3OTAzMjQ3MzQ=","number":1755,"title":"Using select\/reordering datasets slows operations down immensely","user":{"login":"afogarty85","id":49048309,"node_id":"MDQ6VXNlcjQ5MDQ4MzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49048309?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/afogarty85","html_url":"https:\/\/github.com\/afogarty85","followers_url":"https:\/\/api.github.com\/users\/afogarty85\/followers","following_url":"https:\/\/api.github.com\/users\/afogarty85\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/afogarty85\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/afogarty85\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/afogarty85\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/afogarty85\/orgs","repos_url":"https:\/\/api.github.com\/users\/afogarty85\/repos","events_url":"https:\/\/api.github.com\/users\/afogarty85\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/afogarty85\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-20T21:12:12Z","updated_at":"2021-01-20T22:03:39Z","closed_at":"2021-01-20T22:03:39Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am using portions of HF's helpful work in preparing \/ scoring the SQuAD 2.0 data. The problem I have is that after using `select` to re-ordering the dataset, computations slow down immensely where the total scoring process on 131k training examples would take maybe 3 minutes, now take over an hour.\r\n\r\nThe below example should be reproducible and I have ran myself down this path because I want to use HF's scoring functions and helpful data preparation, but use my own trainer. The training process uses shuffle and therefore the order I trained on no longer matches the original data set order. So, to score my results correctly, the original data set needs to match the order of the training. 
This requires that I: (1) collect the index for each row of data emitted during training, and (2) use this index information to re-order the datasets correctly so the orders match when I go to score.\r\n\r\n\r\nThe problem is, the dataset class starts performing very poorly as soon as you start manipulating its order by immense magnitudes.\r\n\r\n\r\n\r\n```\r\nfrom datasets import load_dataset, load_metric\r\nfrom transformers import BertTokenizerFast, BertForQuestionAnswering\r\nfrom elasticsearch import Elasticsearch\r\nimport numpy as np\r\nimport collections\r\nfrom tqdm.auto import tqdm\r\nimport torch\r\n\r\n# from https:\/\/colab.research.google.com\/github\/huggingface\/notebooks\/blob\/master\/examples\/question_answering.ipynb#scrollTo=941LPhDWeYv-\r\ntokenizer = BertTokenizerFast.from_pretrained('bert-base-uncased')\r\nmax_length = 384 # The maximum length of a feature (question and context)\r\ndoc_stride = 128 # The authorized overlap between two part of the context when splitting it is needed.\r\npad_on_right = tokenizer.padding_side == \"right\"\r\nsquad_v2 = True\r\n\r\n# from https:\/\/colab.research.google.com\/github\/huggingface\/notebooks\/blob\/master\/examples\/question_answering.ipynb#scrollTo=941LPhDWeYv-\r\ndef prepare_validation_features(examples):\r\n # Tokenize our examples with truncation and maybe padding, but keep the overflows using a stride. This results\r\n # in one example possible giving several features when a context is long, each of those features having a\r\n # context that overlaps a bit the context of the previous feature.\r\n tokenized_examples = tokenizer(\r\n examples[\"question\" if pad_on_right else \"context\"],\r\n examples[\"context\" if pad_on_right else \"question\"],\r\n truncation=\"only_second\" if pad_on_right else \"only_first\",\r\n max_length=max_length,\r\n stride=doc_stride,\r\n return_overflowing_tokens=True,\r\n return_offsets_mapping=True,\r\n padding=\"max_length\",\r\n )\r\n\r\n # Since one example might give us several features if it has a long context, we need a map from a feature to\r\n # its corresponding example. 
This key gives us just that.\r\n sample_mapping = tokenized_examples.pop(\"overflow_to_sample_mapping\")\r\n\r\n # We keep the example_id that gave us this feature and we will store the offset mappings.\r\n tokenized_examples[\"example_id\"] = []\r\n\r\n for i in range(len(tokenized_examples[\"input_ids\"])):\r\n # Grab the sequence corresponding to that example (to know what is the context and what is the question).\r\n sequence_ids = tokenized_examples.sequence_ids(i)\r\n context_index = 1 if pad_on_right else 0\r\n\r\n # One example can give several spans, this is the index of the example containing this span of text.\r\n sample_index = sample_mapping[i]\r\n tokenized_examples[\"example_id\"].append(examples[\"id\"][sample_index])\r\n\r\n # Set to None the offset_mapping that are not part of the context so it's easy to determine if a token\r\n # position is part of the context or not.\r\n tokenized_examples[\"offset_mapping\"][i] = [\r\n (list(o) if sequence_ids[k] == context_index else None)\r\n for k, o in enumerate(tokenized_examples[\"offset_mapping\"][i])\r\n ]\r\n\r\n return tokenized_examples\r\n\r\n# from https:\/\/colab.research.google.com\/github\/huggingface\/notebooks\/blob\/master\/examples\/question_answering.ipynb#scrollTo=941LPhDWeYv-\r\ndef postprocess_qa_predictions(examples, features, starting_logits, ending_logits, n_best_size = 20, max_answer_length = 30):\r\n all_start_logits, all_end_logits = starting_logits, ending_logits\r\n # Build a map example to its corresponding features.\r\n example_id_to_index = {k: i for i, k in enumerate(examples[\"id\"])}\r\n features_per_example = collections.defaultdict(list)\r\n\r\n for i, feature in enumerate(features):\r\n features_per_example[example_id_to_index[feature[\"example_id\"]]].append(i)\r\n\r\n # The dictionaries we have to fill.\r\n predictions = collections.OrderedDict()\r\n\r\n # Logging.\r\n print(f\"Post-processing {len(examples)} example predictions split into {len(features)} features.\")\r\n\r\n # Let's loop over all the examples!\r\n for example_index, example in enumerate(tqdm(examples)):\r\n # Those are the indices of the features associated to the current example.\r\n feature_indices = features_per_example[example_index]\r\n\r\n min_null_score = None # Only used if squad_v2 is True.\r\n valid_answers = []\r\n\r\n context = example[\"context\"]\r\n # Looping through all the features associated to the current example.\r\n for feature_index in feature_indices:\r\n\r\n # We grab the predictions of the model for this feature.\r\n start_logits = all_start_logits[feature_index]\r\n end_logits = all_end_logits[feature_index]\r\n # This is what will allow us to map some the positions in our logits to span of texts in the original\r\n # context.\r\n offset_mapping = features[feature_index][\"offset_mapping\"]\r\n\r\n # Update minimum null prediction.\r\n cls_index = features[feature_index][\"input_ids\"].index(tokenizer.cls_token_id)\r\n feature_null_score = start_logits[cls_index] + end_logits[cls_index]\r\n if min_null_score is None or min_null_score < feature_null_score:\r\n min_null_score = feature_null_score\r\n\r\n # Go through all possibilities for the `n_best_size` greater start and end logits.\r\n start_indexes = np.argsort(start_logits)[-1 : -n_best_size - 1 : -1].tolist()\r\n end_indexes = np.argsort(end_logits)[-1 : -n_best_size - 1 : -1].tolist()\r\n for start_index in start_indexes:\r\n for end_index in end_indexes:\r\n # Don't consider out-of-scope answers, either because the indices are out of bounds 
or correspond\r\n # to part of the input_ids that are not in the context.\r\n if (\r\n start_index >= len(offset_mapping)\r\n or end_index >= len(offset_mapping)\r\n or offset_mapping[start_index] is None\r\n or offset_mapping[end_index] is None\r\n ):\r\n continue\r\n # Don't consider answers with a length that is either < 0 or > max_answer_length.\r\n if end_index < start_index or end_index - start_index + 1 > max_answer_length:\r\n continue\r\n\r\n start_char = offset_mapping[start_index][0]\r\n end_char = offset_mapping[end_index][1]\r\n valid_answers.append(\r\n {\r\n \"score\": start_logits[start_index] + end_logits[end_index],\r\n \"text\": context[start_char: end_char]\r\n }\r\n )\r\n\r\n\r\n if len(valid_answers) > 0:\r\n best_answer = sorted(valid_answers, key=lambda x: x[\"score\"], reverse=True)[0]\r\n else:\r\n # In the very rare edge case we have not a single non-null prediction, we create a fake prediction to avoid\r\n # failure.\r\n best_answer = {\"text\": \"\", \"score\": 0.0}\r\n\r\n # Let's pick our final answer: the best one or the null answer (only for squad_v2)\r\n if not squad_v2:\r\n predictions[example[\"id\"]] = best_answer[\"text\"]\r\n else:\r\n answer = best_answer[\"text\"] if best_answer[\"score\"] > min_null_score else \"\"\r\n predictions[example[\"id\"]] = answer\r\n\r\n return predictions\r\n\r\n\r\n\r\n# build base examples, features from training data\r\nexamples = load_dataset(\"squad_v2\").shuffle(seed=5)['train']\r\nfeatures = load_dataset(\"squad_v2\").shuffle(seed=5)['train'].map(\r\n prepare_validation_features,\r\n batched=True,\r\n remove_columns=['answers', 'context', 'id', 'question', 'title'])\r\n\r\n# sim some shuffled training indices that we want to use to re-order the data to compare how we did\r\nshuffle_idx = np.arange(0, 131754)\r\nnp.random.shuffle(shuffle_idx)\r\n# create a new dataset with rows selected following the training shuffle\r\nfeatures = features.select(indices=shuffle_idx)\r\n# get unique example ids to match with the \"example\" data\r\nid_list = list(dict.fromkeys(features['example_id']))\r\n# now search for their index positions; load elastic search\r\nes = Elasticsearch([{'host': 'localhost'}]).ping()\r\n# add an index to the id column for the examples\r\nexamples.add_elasticsearch_index(column='id')\r\n# search the examples for their index position\r\nexample_idx = [examples.search(index_name='id', query=i, k=1).indices for i in id_list]\r\n# drop the elastic search\r\nexamples.drop_index(index_name='id')\r\n# put examples in the right order\r\nexamples = examples.select(indices=example_idx)\r\n\r\n# generate some fake data\r\nlogits = {'starting_logits': torch.randn(131754, 384), 'ending_logits': torch.randn(131754, 384)}\r\n\r\n\r\ndef score_squad(logits, n_best_size, max_answer):\r\n # proceed with QA calculation\r\n final_predictions = postprocess_qa_predictions(examples=examples,\r\n features=features,\r\n starting_logits=logits['starting_logits'],\r\n ending_logits=logits['ending_logits'],\r\n n_best_size=20,\r\n max_answer_length=30)\r\n metric = load_metric(\"squad_v2\")\r\n formatted_predictions = [{\"id\": k, \"prediction_text\": v, \"no_answer_probability\": 0.0} for k, v in final_predictions.items()]\r\n references = [{\"id\": ex[\"id\"], \"answers\": ex[\"answers\"]} for ex in examples]\r\n metrics = metric.compute(predictions=formatted_predictions, references=references)\r\n return metrics\r\n\r\nmetrics = score_squad(logits, n_best_size=20, 
max_answer=30)\r\n```\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1755\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1755\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1754","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1754\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1754\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1754\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1754","id":789881730,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU4MTU5NjEw","number":1754,"title":"Use a config id in the cache directory names for custom configs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-20T11:11:00Z","updated_at":"2021-01-25T09:12:07Z","closed_at":"2021-01-25T09:12:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1754","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1754","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1754.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1754.patch","merged_at":"2021-01-25T09:12:06Z"},"body":"As noticed by @JetRunner there was some issues when trying to generate a dataset using a custom config that is based on an existing config.\r\n\r\nFor example in the following code the `mnli_custom` would reuse the cache used to create `mnli` instead of generating a new dataset with the new label classes:\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nmnli = load_dataset(\"glue\", \"mnli\")\r\nmnli_custom = load_dataset(\"glue\", \"mnli\", label_classes=[\"contradiction\", \"entailment\", \"neutral\"])\r\n```\r\n\r\nI fixed that by extending the cache directory definition of a dataset that is being generated.\r\nInstead of using the config name in the cache directory name, I switched to using a `config_id`.\r\n\r\nBy default it is equal to the config name.\r\nHowever the name of a config is not sufficent to have a unique identifier for the dataset being generated since 
it doesn't take into account:\r\n- the config kwargs that can be used to overwrite attributes\r\n- the custom features used to write the dataset\r\n- the data_files for json\/text\/csv\/pandas datasets\r\n\r\nTherefore the config id is just the config name with an optional suffix based on these.\r\n\r\nIn particular taking into account the config kwargs fixes the issue with the `label_classes` above.\r\n\r\nI completed the current test cases by adding the case that was missing: overwriting an already existing config.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1754\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1754\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1753","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1753\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1753\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1753\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1753","id":789867685,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU4MTQ3Njkx","number":1753,"title":"fix comet citations","user":{"login":"ricardorei","id":17256847,"node_id":"MDQ6VXNlcjE3MjU2ODQ3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17256847?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ricardorei","html_url":"https:\/\/github.com\/ricardorei","followers_url":"https:\/\/api.github.com\/users\/ricardorei\/followers","following_url":"https:\/\/api.github.com\/users\/ricardorei\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ricardorei\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ricardorei\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ricardorei\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ricardorei\/orgs","repos_url":"https:\/\/api.github.com\/users\/ricardorei\/repos","events_url":"https:\/\/api.github.com\/users\/ricardorei\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ricardorei\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-20T10:52:38Z","updated_at":"2021-01-20T14:39:30Z","closed_at":"2021-01-20T14:39:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1753","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1753","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1753.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1753.patch","merged_at":"2021-01-20T14:39:30Z"},"body":"I realized COMET citations were not showing in the hugging face metrics page:\r\n\r\n\"Screenshot\r\n\r\nThis pull request is intended to fix 
that.\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1753\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1753\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1752","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1752\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1752\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1752\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1752","id":789822459,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU4MTA5NTA5","number":1752,"title":"COMET metric citation","user":{"login":"ricardorei","id":17256847,"node_id":"MDQ6VXNlcjE3MjU2ODQ3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17256847?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ricardorei","html_url":"https:\/\/github.com\/ricardorei","followers_url":"https:\/\/api.github.com\/users\/ricardorei\/followers","following_url":"https:\/\/api.github.com\/users\/ricardorei\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ricardorei\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ricardorei\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ricardorei\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ricardorei\/orgs","repos_url":"https:\/\/api.github.com\/users\/ricardorei\/repos","events_url":"https:\/\/api.github.com\/users\/ricardorei\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ricardorei\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-20T09:54:43Z","updated_at":"2021-01-20T10:27:07Z","closed_at":"2021-01-20T10:25:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1752","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1752","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1752.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1752.patch","merged_at":null},"body":"In my last pull request to add COMET metric, the citations where not following the usual \"format\". 
Because of that they where not correctly displayed on the website: \r\n\r\n\"Screenshot\r\n\r\nThis pull request is only intended to fix that.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1752\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1752\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1751","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1751\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1751\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1751\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1751","id":789232980,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU3NjA1ODE2","number":1751,"title":"Updated README for the Social Bias Frames dataset","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-19T17:53:00Z","updated_at":"2021-01-20T14:56:52Z","closed_at":"2021-01-20T14:56:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1751","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1751","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1751.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1751.patch","merged_at":"2021-01-20T14:56:52Z"},"body":"See the updated card at https:\/\/github.com\/mcmillanmajora\/datasets\/tree\/add-SBIC-card\/datasets\/social_bias_frames. 
I incorporated information from the [SBIC data statement](https:\/\/homes.cs.washington.edu\/~msap\/social-bias-frames\/DATASTATEMENT.html), paper, and the corpus README file included with the dataset download.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1751\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1751\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1750","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1750\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1750\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1750\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1750","id":788668085,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU3MTM1MzM1","number":1750,"title":"Fix typo in README.md of cnn_dailymail","user":{"login":"forest1988","id":2755894,"node_id":"MDQ6VXNlcjI3NTU4OTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2755894?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/forest1988","html_url":"https:\/\/github.com\/forest1988","followers_url":"https:\/\/api.github.com\/users\/forest1988\/followers","following_url":"https:\/\/api.github.com\/users\/forest1988\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/forest1988\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/forest1988\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/forest1988\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/forest1988\/orgs","repos_url":"https:\/\/api.github.com\/users\/forest1988\/repos","events_url":"https:\/\/api.github.com\/users\/forest1988\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/forest1988\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-19T03:06:05Z","updated_at":"2021-01-19T11:07:29Z","closed_at":"2021-01-19T09:48:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1750","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1750","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1750.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1750.patch","merged_at":"2021-01-19T09:48:43Z"},"body":"When I read the README.md of `CNN\/DailyMail Dataset`, there seems to be a typo `CCN`.\r\n\r\nI am afraid this is a trivial matter, but I would like to make a suggestion for revision.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1750\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1750\/timeline","performed_via_github_app":null} 
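To make the caching behaviour described in PR #1754 above concrete, the hedged sketch below revisits the `label_classes` example from that PR: with the config-id suffix in place, the custom config should be generated into its own cache directory instead of silently reusing the stock `mnli` cache. The `cache_files` inspection is only one way such a check might be done and is not part of the PR itself.

```python
from datasets import load_dataset

# Stock config vs. a custom config that overrides a builder kwarg (from PR #1754).
mnli = load_dataset("glue", "mnli")
mnli_custom = load_dataset(
    "glue", "mnli", label_classes=["contradiction", "entailment", "neutral"]
)

# With the config_id fix, the two datasets are generated and cached separately,
# which can be verified by comparing the Arrow files they were loaded from.
print(mnli["train"].cache_files[0]["filename"])
print(mnli_custom["train"].cache_files[0]["filename"])
assert mnli["train"].cache_files != mnli_custom["train"].cache_files
```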
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1749","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1749\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1749\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1749\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1749","id":788476639,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU2OTgxMDc5","number":1749,"title":"Added metadata and correct splits for swda.","user":{"login":"gmihaila","id":22454783,"node_id":"MDQ6VXNlcjIyNDU0Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22454783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gmihaila","html_url":"https:\/\/github.com\/gmihaila","followers_url":"https:\/\/api.github.com\/users\/gmihaila\/followers","following_url":"https:\/\/api.github.com\/users\/gmihaila\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gmihaila\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gmihaila\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gmihaila\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gmihaila\/orgs","repos_url":"https:\/\/api.github.com\/users\/gmihaila\/repos","events_url":"https:\/\/api.github.com\/users\/gmihaila\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gmihaila\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-18T18:36:32Z","updated_at":"2021-01-29T19:35:52Z","closed_at":"2021-01-29T18:38:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1749","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1749","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1749.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1749.patch","merged_at":"2021-01-29T18:38:08Z"},"body":"Switchboard Dialog Act Corpus\r\n\r\nI made some changes following @bhavitvyamalik recommendation in #1678:\r\n\r\n* Contains all metadata.\r\n* Used official implementation from the [\/swda](https:\/\/github.com\/cgpotts\/swda) repo.\r\n* Add official train and test splits used in [Stolcke et al. 
(2000)](https:\/\/web.stanford.edu\/~jurafsky\/ws97) and validation split used in [Probabilistic-RNN-DA-Classifier](https:\/\/github.com\/NathanDuran\/Probabilistic-RNN-DA-Classifier).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1749\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":2,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1749\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1748","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1748\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1748\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1748\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1748","id":788431642,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU2OTQ0NDEx","number":1748,"title":"add Stuctured Argument Extraction for Korean dataset","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-18T17:14:19Z","updated_at":"2021-09-17T16:53:18Z","closed_at":"2021-01-19T11:26:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1748","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1748","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1748.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1748.patch","merged_at":"2021-01-19T11:26:58Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1748\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1748\/timeline","performed_via_github_app":null} 
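As a quick illustration of the split layout added for the Switchboard Dialog Act Corpus in PR #1749 above, the hedged sketch below shows how the dataset would typically be loaded once the PR is merged; the exact split names and feature columns are assumptions based on the PR description, not verified output.

```python
from datasets import load_dataset

# Sketch assuming the official train/validation/test splits from PR #1749.
swda = load_dataset("swda")

for split_name, split in swda.items():
    # Each split should expose the per-utterance metadata described in the PR.
    print(split_name, split.num_rows, list(split.features)[:5])
```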
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1747","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1747\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1747\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1747\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1747","id":788299775,"node_id":"MDU6SXNzdWU3ODgyOTk3NzU=","number":1747,"title":"datasets slicing with seed ","user":{"login":"ghost","id":10137,"node_id":"MDQ6VXNlcjEwMTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghost","html_url":"https:\/\/github.com\/ghost","followers_url":"https:\/\/api.github.com\/users\/ghost\/followers","following_url":"https:\/\/api.github.com\/users\/ghost\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghost\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghost\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghost\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghost\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghost\/repos","events_url":"https:\/\/api.github.com\/users\/ghost\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghost\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-18T14:08:55Z","updated_at":"2021-01-18T14:45:34Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI need to slice a dataset with random seed, I looked into documentation here https:\/\/huggingface.co\/docs\/datasets\/splits.html \r\nI could not find a seed option, could you assist me please how I can get a slice for different seeds?\r\nthank you.\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1747\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1747\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1746","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1746\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1746\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1746\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1746","id":788188184,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU2NzQxMjIw","number":1746,"title":"Fix release conda 
worflow","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-18T11:29:10Z","updated_at":"2021-01-18T11:31:24Z","closed_at":"2021-01-18T11:31:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1746","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1746","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1746.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1746.patch","merged_at":"2021-01-18T11:31:23Z"},"body":"The current workflow yaml file is not valid according to https:\/\/github.com\/huggingface\/datasets\/actions\/runs\/487638110","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1746\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1746\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1745","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1745\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1745\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1745\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1745","id":787838256,"node_id":"MDU6SXNzdWU3ODc4MzgyNTY=","number":1745,"title":"difference between wsc and wsc.fixed for 
superglue","user":{"login":"ghost","id":10137,"node_id":"MDQ6VXNlcjEwMTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghost","html_url":"https:\/\/github.com\/ghost","followers_url":"https:\/\/api.github.com\/users\/ghost\/followers","following_url":"https:\/\/api.github.com\/users\/ghost\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghost\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghost\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghost\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghost\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghost\/repos","events_url":"https:\/\/api.github.com\/users\/ghost\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghost\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-18T00:50:19Z","updated_at":"2021-01-18T11:02:43Z","closed_at":"2021-01-18T00:59:34Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI see two versions of wsc in superglue, and I am not sure what is the differences and which one is the original one. could you help to discuss the differences? thanks @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1745\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1745\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1744","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1744\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1744\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1744\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1744","id":787649811,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU2MzA0MjU4","number":1744,"title":"Add missing \"brief\" entries to 
reuters","user":{"login":"jbragg","id":2238344,"node_id":"MDQ6VXNlcjIyMzgzNDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2238344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jbragg","html_url":"https:\/\/github.com\/jbragg","followers_url":"https:\/\/api.github.com\/users\/jbragg\/followers","following_url":"https:\/\/api.github.com\/users\/jbragg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jbragg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jbragg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jbragg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jbragg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jbragg\/repos","events_url":"https:\/\/api.github.com\/users\/jbragg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jbragg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-17T07:58:49Z","updated_at":"2021-01-18T11:26:09Z","closed_at":"2021-01-18T11:26:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1744","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1744","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1744.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1744.patch","merged_at":"2021-01-18T11:26:09Z"},"body":"This brings the number of examples for ModApte to match the stated `Training set (9,603 docs)...Test Set (3,299 docs)`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1744\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1744\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1743","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1743\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1743\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1743\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1743","id":787631412,"node_id":"MDU6SXNzdWU3ODc2MzE0MTI=","number":1743,"title":"Issue while Creating Custom 
Metric","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-17T07:01:14Z","updated_at":"2021-01-22T16:45:00Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi Team,\r\n\r\nI am trying to create a custom metric for my training as follows, where f1 is my own metric:\r\n\r\n```python\r\n def _info(self):\r\n # TODO: Specifies the datasets.MetricInfo object\r\n return datasets.MetricInfo(\r\n # This is the description that will appear on the metrics page.\r\n description=_DESCRIPTION,\r\n citation=_CITATION,\r\n inputs_description=_KWARGS_DESCRIPTION,\r\n # This defines the format of each prediction and reference\r\n features = datasets.Features({'predictions':datasets.Sequence(datasets.Value(\"int32\")), \"references\": datasets.Sequence(datasets.Value(\"int32\")),\"offset_mapping\":datasets.Sequence(datasets.Value(\"int32\")),'text':datasets.Sequence(datasets.Value('string')),\"ground\":datasets.Sequence(datasets.Value(\"int32\")),}),\r\n # Homepage of the metric for documentation\r\n homepage=\"http:\/\/metric.homepage\",\r\n # Additional links to the codebase or references\r\n codebase_urls=[\"http:\/\/github.com\/path\/to\/codebase\/of\/new_metric\"],\r\n reference_urls=[\"http:\/\/path.to.reference.url\/new_metric\"]\r\n )\r\n\r\n def _compute(self,predictions,references,text,offset_mapping,spans):\r\n\r\n pred_spans = []\r\n\r\n for i,preds in enumerate(predictions):\r\n current_preds = []\r\n for j,token_preds in enumerate(preds):\r\n if (preds>0.5):\r\n current_preds+=list(range(offset_mapping[i][j][0],offset_mapping[i][j][1]))\r\n pred_spans.append(current_spans)\r\n \r\n return {\r\n \"Token Wise F1\": f1_score(references,predictions,labels=[0,1]),\r\n \"Offset Wise F1\": np.mean([f1(preds,gold) for preds,fold in zip(pred_spans,ground)])\r\n }\r\n\r\n```\r\n\r\nI believe this is not correct. But that's not the issue I am facing right now. 
I get this error :\r\n```python\r\n---------------------------------------------------------------------------\r\nValueError Traceback (most recent call last)\r\n in ()\r\n----> 1 new_metric.compute(predictions=inputs[\"labels\"],references=inputs[\"labels\"], text=inputs[\"text\"], offset_mapping=inputs[\"offset_mapping\"],ground=inputs[\"ground\"] )\r\n\r\n2 frames\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/features.py in encode_batch(self, batch)\r\n 802 encoded_batch = {}\r\n 803 if set(batch) != set(self):\r\n--> 804 print(batch)\r\n 805 print(self)\r\n 806 raise ValueError(\"Column mismatch between batch {} and features {}\".format(set(batch), set(self)))\r\n\r\nValueError: Column mismatch between batch {'references', 'predictions'} and features {'ground', 'predictions', 'offset_mapping', 'text', 'references'}\r\n```\r\nOn checking the features.py file, I see the call is made from add_batch() in metrics.py which only takes in predictions and references.\r\n\r\nHow do I make my custom metric work? Will it work with a trainer even if I am able to make this metric work?\r\n\r\nThanks,\r\nGunjan","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1743\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1743\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1742","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1742\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1742\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1742\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1742","id":787623640,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU2MjgyMDYw","number":1742,"title":"Add GLUE Compat (compatible with 
transformers<3.5.0)","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-17T05:54:25Z","updated_at":"2021-03-29T12:43:30Z","closed_at":"2021-03-29T12:43:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1742","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1742","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1742.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1742.patch","merged_at":null},"body":"Link to our discussion on Slack (HF internal)\r\nhttps:\/\/huggingface.slack.com\/archives\/C014N4749J9\/p1609668119337400\r\n\r\nThe next step is to add a compatible option in the new `run_glue.py`\r\n\r\nI duplicated `glue` and made the following changes:\r\n1. Change the name to `glue_compat`.\r\n2. 
Change the label assignments for MNLI and AX.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1742\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1742\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1741","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1741\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1741\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1741\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1741","id":787327060,"node_id":"MDU6SXNzdWU3ODczMjcwNjA=","number":1741,"title":"error when run fine_tuning on text_classification","user":{"login":"XiaoYang66","id":43234824,"node_id":"MDQ6VXNlcjQzMjM0ODI0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43234824?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/XiaoYang66","html_url":"https:\/\/github.com\/XiaoYang66","followers_url":"https:\/\/api.github.com\/users\/XiaoYang66\/followers","following_url":"https:\/\/api.github.com\/users\/XiaoYang66\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/XiaoYang66\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/XiaoYang66\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/XiaoYang66\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/XiaoYang66\/orgs","repos_url":"https:\/\/api.github.com\/users\/XiaoYang66\/repos","events_url":"https:\/\/api.github.com\/users\/XiaoYang66\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/XiaoYang66\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-16T02:23:19Z","updated_at":"2021-01-16T02:39:28Z","closed_at":"2021-01-16T02:39:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"dataset:sem_eval_2014_task_1\r\npretrained_model:bert-base-uncased\r\n\r\nerror description:\r\nwhen i use these resoruce to train fine_tuning a text_classification on sem_eval_2014_task_1,there always be some problem(when i use other dataset ,there exist the error too). 
And i followed the colab code (url:https:\/\/colab.research.google.com\/github\/huggingface\/notebooks\/blob\/master\/examples\/text_classification.ipynb#scrollTo=TlqNaB8jIrJW).\r\n\r\n\r\nthe error is like this :\r\n`File \"train.py\", line 69, in \r\n trainer.train()\r\n File \"\/home\/projects\/anaconda3\/envs\/calibration\/lib\/python3.7\/site-packages\/transformers\/trainer.py\", line 784, in train\r\n for step, inputs in enumerate(epoch_iterator):\r\n File \"\/home\/projects\/anaconda3\/envs\/calibration\/lib\/python3.7\/site-packages\/torch\/utils\/data\/dataloader.py\", line 435, in __next__\r\n data = self._next_data()\r\n File \"\/home\/projects\/anaconda3\/envs\/calibration\/lib\/python3.7\/site-packages\/torch\/utils\/data\/dataloader.py\", line 475, in _next_data\r\n data = self._dataset_fetcher.fetch(index) # may raise StopIteration\r\n File \"\/home\/projects\/anaconda3\/envs\/calibration\/lib\/python3.7\/site-packages\/torch\/utils\/data\/_utils\/fetch.py\", line 44, in fetch\r\n data = [self.dataset[idx] for idx in possibly_batched_index]\r\n File \"\/home\/projects\/anaconda3\/envs\/calibration\/lib\/python3.7\/site-packages\/torch\/utils\/data\/_utils\/fetch.py\", line 44, in \r\n data = [self.dataset[idx] for idx in possibly_batched_index]\r\nKeyError: 2`\r\n\r\nthis is my code :\r\n```dataset_name = 'sem_eval_2014_task_1'\r\nnum_labels_size = 3\r\nbatch_size = 4\r\nmodel_checkpoint = 'bert-base-uncased'\r\nnumber_train_epoch = 5\r\n\r\ndef tokenize(batch):\r\nreturn tokenizer(batch['premise'], batch['hypothesis'], truncation=True, )\r\n\r\ndef compute_metrics(pred):\r\nlabels = pred.label_ids\r\npreds = pred.predictions.argmax(-1)\r\nprecision, recall, f1, _ = precision_recall_fscore_support(labels, preds, average='micro')\r\nacc = accuracy_score(labels, preds)\r\nreturn {\r\n'accuracy': acc,\r\n'f1': f1,\r\n'precision': precision,\r\n'recall': recall\r\n}\r\n\r\nmodel = BertForSequenceClassification.from_pretrained(model_checkpoint, num_labels=num_labels_size)\r\ntokenizer = BertTokenizerFast.from_pretrained(model_checkpoint, use_fast=True)\r\n\r\ntrain_dataset = load_dataset(dataset_name, split='train')\r\ntest_dataset = load_dataset(dataset_name, split='test')\r\n\r\ntrain_encoded_dataset = train_dataset.map(tokenize, batched=True)\r\ntest_encoded_dataset = test_dataset.map(tokenize, batched=True)\r\n\r\nargs = TrainingArguments(\r\noutput_dir='.\/results',\r\nevaluation_strategy=\"epoch\",\r\nlearning_rate=2e-5,\r\nper_device_train_batch_size=batch_size,\r\nper_device_eval_batch_size=batch_size,\r\nnum_train_epochs=number_train_epoch,\r\nweight_decay=0.01,\r\ndo_predict=True,\r\n)\r\ntrainer = Trainer(\r\nmodel=model,\r\nargs=args,\r\ncompute_metrics=compute_metrics,\r\ntrain_dataset=train_encoded_dataset,\r\neval_dataset=test_encoded_dataset,\r\ntokenizer=tokenizer\r\n)\r\n\r\ntrainer.train()\r\ntrainer.evaluate()\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1741\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1741\/timeline","performed_via_github_app":null} 
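The fine-tuning report above (issue #1741) is easiest to approach by inspecting the dataset schema before handing it to the `Trainer`. Below is a minimal inspection sketch, not a fix from the issue thread: it assumes the `datasets` 1.x API and the `sem_eval_2014_task_1` dataset named in the report, and it simply prints whatever columns the dataset provides so a mismatch with the label column the `Trainer` expects becomes visible.

```python
from datasets import load_dataset

# Inspection sketch only (assumption: datasets 1.x, dataset name taken from issue #1741).
train_dataset = load_dataset("sem_eval_2014_task_1", split="train")

# Print the schema before tokenization; if the label column is not named the way
# the Trainer expects, that usually explains indexing/KeyError failures downstream.
print(train_dataset.column_names)
print(train_dataset.features)
```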
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1740","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1740\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1740\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1740\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1740","id":787264605,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU2MDA5NjM1","number":1740,"title":"add id_liputan6 dataset","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-15T22:58:34Z","updated_at":"2021-01-20T13:41:26Z","closed_at":"2021-01-20T13:41:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1740","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1740","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1740.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1740.patch","merged_at":"2021-01-20T13:41:26Z"},"body":"id_liputan6 is a large-scale Indonesian summarization dataset. 
The articles were harvested from an online news portal, and obtain 215,827 document-summary pairs: https:\/\/arxiv.org\/abs\/2011.00679","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1740\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1740\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1739","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1739\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1739\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1739\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1739","id":787219138,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU1OTY5Njgx","number":1739,"title":"fixes and improvements for the WebNLG loader","user":{"login":"Shimorina","id":9607332,"node_id":"MDQ6VXNlcjk2MDczMzI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9607332?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Shimorina","html_url":"https:\/\/github.com\/Shimorina","followers_url":"https:\/\/api.github.com\/users\/Shimorina\/followers","following_url":"https:\/\/api.github.com\/users\/Shimorina\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Shimorina\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Shimorina\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Shimorina\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Shimorina\/orgs","repos_url":"https:\/\/api.github.com\/users\/Shimorina\/repos","events_url":"https:\/\/api.github.com\/users\/Shimorina\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Shimorina\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-01-15T21:45:23Z","updated_at":"2021-01-29T14:34:06Z","closed_at":"2021-01-29T10:53:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1739","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1739","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1739.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1739.patch","merged_at":"2021-01-29T10:53:03Z"},"body":"- fixes test sets loading in v3.0\r\n- adds additional fields for v3.0_ru\r\n- adds info to the WebNLG data card","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1739\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1739\/timeline","performed_via_github_app":null} 
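For the two dataset records above (the id_liputan6 addition and the WebNLG loader fixes), a short loading sketch may help. The config name below is an assumption inferred from the PR description ("v3.0" / "v3.0_ru"); the authoritative identifiers live in the dataset card, not in this dump.

```python
from datasets import load_dataset

# Sketch only: "release_v3.0_en" is assumed from the PR text above, not verified here.
webnlg = load_dataset("web_nlg", "release_v3.0_en")
print(webnlg)  # shows the available splits and their sizes
```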
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1738","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1738\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1738\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1738\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1738","id":786068440,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU0OTk2NDU4","number":1738,"title":"Conda support","user":{"login":"LysandreJik","id":30755778,"node_id":"MDQ6VXNlcjMwNzU1Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30755778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LysandreJik","html_url":"https:\/\/github.com\/LysandreJik","followers_url":"https:\/\/api.github.com\/users\/LysandreJik\/followers","following_url":"https:\/\/api.github.com\/users\/LysandreJik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LysandreJik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LysandreJik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LysandreJik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LysandreJik\/orgs","repos_url":"https:\/\/api.github.com\/users\/LysandreJik\/repos","events_url":"https:\/\/api.github.com\/users\/LysandreJik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LysandreJik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-01-14T15:11:25Z","updated_at":"2021-01-15T10:08:20Z","closed_at":"2021-01-15T10:08:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1738","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1738","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1738.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1738.patch","merged_at":"2021-01-15T10:08:18Z"},"body":"Will push a new version on anaconda cloud every time a tag starting with `v` is pushed (like `v1.2.2`).\r\n\r\nWill appear here: https:\/\/anaconda.org\/huggingface\/datasets\r\n\r\nDepends on `conda-forge` for now, so the following is required for installation:\r\n\r\n```\r\nconda install -c huggingface -c conda-forge datasets\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1738\/reactions","total_count":4,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":4,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1738\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1737","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1737\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1737\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1737\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1737","id":785606286,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU0NjA2ODg5","number":1737,"title":"update link in TLC to be github 
links","user":{"login":"chameleonTK","id":6429850,"node_id":"MDQ6VXNlcjY0Mjk4NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6429850?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chameleonTK","html_url":"https:\/\/github.com\/chameleonTK","followers_url":"https:\/\/api.github.com\/users\/chameleonTK\/followers","following_url":"https:\/\/api.github.com\/users\/chameleonTK\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chameleonTK\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chameleonTK\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chameleonTK\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chameleonTK\/orgs","repos_url":"https:\/\/api.github.com\/users\/chameleonTK\/repos","events_url":"https:\/\/api.github.com\/users\/chameleonTK\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chameleonTK\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-14T02:49:21Z","updated_at":"2021-01-14T10:25:24Z","closed_at":"2021-01-14T10:25:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1737","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1737","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1737.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1737.patch","merged_at":"2021-01-14T10:25:24Z"},"body":"Base on this issue https:\/\/github.com\/huggingface\/datasets\/issues\/1064, I can now use the official links.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1737\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1737\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1736","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1736\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1736\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1736\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1736","id":785433854,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU0NDYyNjYw","number":1736,"title":"Adjust BrWaC dataset features 
name","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-13T20:39:04Z","updated_at":"2021-01-14T10:29:38Z","closed_at":"2021-01-14T10:29:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1736","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1736","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1736.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1736.patch","merged_at":"2021-01-14T10:29:38Z"},"body":"I added this dataset some days ago, and today I used it to train some models and realized that the names of the features aren't so good.\r\n\r\nLooking at the current features hierarchy, we have \"paragraphs\" with a list of \"sentences\" with a list of \"sentences?!\". But the actual hierarchy is a \"text\" with a list of \"paragraphs\" with a list of \"sentences\".\r\n\r\nI confused myself trying to use the dataset with these names. 
So I think it's better to change it.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1736\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1736\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1735","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1735\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1735\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1735\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1735","id":785184740,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU0MjUzMDcw","number":1735,"title":"Update add new dataset template","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-13T15:08:09Z","updated_at":"2021-01-14T15:16:01Z","closed_at":"2021-01-14T15:16:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1735","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1735","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1735.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1735.patch","merged_at":"2021-01-14T15:16:00Z"},"body":"This PR fixes a few typos in the \"Add new dataset template\" and clarifies a bit what to do for the dummy data creation when the `auto_generate` flag can't work.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1735\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1735\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1734","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1734\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1734\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1734\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1734","id":784956707,"node_id":"MDExOlB1bGxSZXF1ZXN0NTU0MDYxMzMz","number":1734,"title":"Fix empty token bug for `thainer` and `lst20`","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-13T09:55:09Z","updated_at":"2021-01-14T10:42:18Z","closed_at":"2021-01-14T10:42:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1734","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1734","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1734.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1734.patch","merged_at":"2021-01-14T10:42:18Z"},"body":"add a condition to check if tokens exist before yielding in `thainer` and `lst20`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1734\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1734\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1733","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1733\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1733\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1733\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1733","id":784903002,"node_id":"MDU6SXNzdWU3ODQ5MDMwMDI=","number":1733,"title":"connection issue with glue, what is the data url for glue? 
","user":{"login":"ghost","id":10137,"node_id":"MDQ6VXNlcjEwMTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghost","html_url":"https:\/\/github.com\/ghost","followers_url":"https:\/\/api.github.com\/users\/ghost\/followers","following_url":"https:\/\/api.github.com\/users\/ghost\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghost\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghost\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghost\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghost\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghost\/repos","events_url":"https:\/\/api.github.com\/users\/ghost\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghost\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-13T08:37:40Z","updated_at":"2021-08-04T18:13:55Z","closed_at":"2021-08-04T18:13:55Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nmy codes sometimes fails due to connection issue with glue, could you tell me how I can have the URL datasets library is trying to read GLUE from to test the machines I am working on if there is an issue on my side or not\r\nthanks ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1733\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1733\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1732","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1732\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1732\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1732\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1732","id":784874490,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUzOTkzNTAx","number":1732,"title":"[GEM Dataset] Added TurkCorpus, an evaluation dataset for sentence 
simplification.","user":{"login":"mounicam","id":11708999,"node_id":"MDQ6VXNlcjExNzA4OTk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11708999?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mounicam","html_url":"https:\/\/github.com\/mounicam","followers_url":"https:\/\/api.github.com\/users\/mounicam\/followers","following_url":"https:\/\/api.github.com\/users\/mounicam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mounicam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mounicam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mounicam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mounicam\/orgs","repos_url":"https:\/\/api.github.com\/users\/mounicam\/repos","events_url":"https:\/\/api.github.com\/users\/mounicam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mounicam\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-13T07:50:19Z","updated_at":"2021-01-14T10:19:41Z","closed_at":"2021-01-14T10:19:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1732","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1732","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1732.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1732.patch","merged_at":"2021-01-14T10:19:40Z"},"body":"We want to use TurkCorpus for validation and testing of the sentence simplification task. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1732\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1732\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1731","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1731\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1731\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1731\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1731","id":784744674,"node_id":"MDU6SXNzdWU3ODQ3NDQ2NzQ=","number":1731,"title":"Couldn't reach 
swda.py","user":{"login":"yangp725","id":13365326,"node_id":"MDQ6VXNlcjEzMzY1MzI2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13365326?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yangp725","html_url":"https:\/\/github.com\/yangp725","followers_url":"https:\/\/api.github.com\/users\/yangp725\/followers","following_url":"https:\/\/api.github.com\/users\/yangp725\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yangp725\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yangp725\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yangp725\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yangp725\/orgs","repos_url":"https:\/\/api.github.com\/users\/yangp725\/repos","events_url":"https:\/\/api.github.com\/users\/yangp725\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yangp725\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-13T02:57:40Z","updated_at":"2021-01-13T11:17:40Z","closed_at":"2021-01-13T11:17:40Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"ConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.2.0\/datasets\/swda\/swda.py\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1731\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1731\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1730","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1730\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1730\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1730\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1730","id":784617525,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUzNzgxMDY0","number":1730,"title":"Add MNIST 
dataset","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-12T21:48:02Z","updated_at":"2021-01-13T10:19:47Z","closed_at":"2021-01-13T10:19:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1730","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1730","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1730.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1730.patch","merged_at":"2021-01-13T10:19:46Z"},"body":"This PR adds the MNIST dataset to the library.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1730\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1730\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1729","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1729\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1729\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1729\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1729","id":784565898,"node_id":"MDU6SXNzdWU3ODQ1NjU4OTg=","number":1729,"title":"Is there support for Deep learning 
datasets?","user":{"login":"pablodz","id":28235457,"node_id":"MDQ6VXNlcjI4MjM1NDU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28235457?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pablodz","html_url":"https:\/\/github.com\/pablodz","followers_url":"https:\/\/api.github.com\/users\/pablodz\/followers","following_url":"https:\/\/api.github.com\/users\/pablodz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pablodz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pablodz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pablodz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pablodz\/orgs","repos_url":"https:\/\/api.github.com\/users\/pablodz\/repos","events_url":"https:\/\/api.github.com\/users\/pablodz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pablodz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-12T20:22:41Z","updated_at":"2021-03-31T04:24:07Z","closed_at":"2021-03-31T04:24:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I looked around this repository and looking the datasets I think that there's no support for images-datasets. Or am I missing something? For example to add a repo like this https:\/\/github.com\/DZPeru\/fish-datasets","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1729\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1729\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1728","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1728\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1728\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1728\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1728","id":784458342,"node_id":"MDU6SXNzdWU3ODQ0NTgzNDI=","number":1728,"title":"Add an entry to an arrow 
dataset","user":{"login":"ameet-1997","id":18645407,"node_id":"MDQ6VXNlcjE4NjQ1NDA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18645407?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ameet-1997","html_url":"https:\/\/github.com\/ameet-1997","followers_url":"https:\/\/api.github.com\/users\/ameet-1997\/followers","following_url":"https:\/\/api.github.com\/users\/ameet-1997\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ameet-1997\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ameet-1997\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ameet-1997\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ameet-1997\/orgs","repos_url":"https:\/\/api.github.com\/users\/ameet-1997\/repos","events_url":"https:\/\/api.github.com\/users\/ameet-1997\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ameet-1997\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-01-12T18:01:47Z","updated_at":"2021-01-18T19:15:32Z","closed_at":"2021-01-18T19:15:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Is it possible to add an entry to a dataset object?\r\n\r\n**Motivation: I want to transform the sentences in the dataset and add them to the original dataset**\r\n\r\nFor example, say we have the following code:\r\n\r\n``` python\r\nfrom datasets import load_dataset\r\n\r\n# Load a dataset and print the first examples in the training set\r\nsquad_dataset = load_dataset('squad')\r\nprint(squad_dataset['train'][0])\r\n```\r\n\r\nIs it possible to add an entry to `squad_dataset`? Something like the following?\r\n\r\n``` python\r\nsquad_dataset.append({'text': \"This is a new sentence\"})\r\n```\r\n\r\nThe motivation for doing this is that I want to transform the sentences in the squad dataset and add them to the original dataset.\r\n\r\nIf the above doesn't work, is there any other way of achieving the motivation mentioned above? 
Perhaps by creating a new arrow dataset by using the older one and the transformer sentences?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1728\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1728\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1727","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1727\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1727\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1727\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1727","id":784435131,"node_id":"MDU6SXNzdWU3ODQ0MzUxMzE=","number":1727,"title":"BLEURT score calculation raises UnrecognizedFlagError","user":{"login":"nadavo","id":6603920,"node_id":"MDQ6VXNlcjY2MDM5MjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6603920?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nadavo","html_url":"https:\/\/github.com\/nadavo","followers_url":"https:\/\/api.github.com\/users\/nadavo\/followers","following_url":"https:\/\/api.github.com\/users\/nadavo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nadavo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nadavo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nadavo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nadavo\/orgs","repos_url":"https:\/\/api.github.com\/users\/nadavo\/repos","events_url":"https:\/\/api.github.com\/users\/nadavo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nadavo\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-01-12T17:27:02Z","updated_at":"2021-04-12T22:21:41Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Calling the `compute` method for **bleurt** metric fails with an `UnrecognizedFlagError` for `FLAGS.bleurt_batch_size`. \r\n\r\nMy environment:\r\n```\r\npython==3.8.5\r\ndatasets==1.2.0\r\ntensorflow==2.3.1\r\ncudatoolkit==11.0.221\r\n```\r\n\r\nTest code for reproducing the error:\r\n```\r\nfrom datasets import load_metric\r\nbleurt = load_metric('bleurt')\r\ngen_text = \"I am walking on the promenade today\"\r\nref_text = \"I am walking along the promenade on this sunny day\"\r\nbleurt.compute(predictions=[test_text], references=[test_text])\r\n```\r\n\r\nError Output:\r\n```\r\nUsing default BLEURT-Base checkpoint for sequence maximum length 128. You can use a bigger model for better results with e.g.: datasets.load_metric('bleurt', 'bleurt-large-512').\r\nINFO:tensorflow:Reading checkpoint \/home\/ubuntu\/.cache\/huggingface\/metrics\/bleurt\/default\/downloads\/extracted\/9aee35580225730ac5422599f35c4986e4c49cafd08082123342b1019720dac4\/bleurt-base-128.\r\nINFO:tensorflow:Config file found, reading.\r\nINFO:tensorflow:Will load checkpoint bert_custom\r\nINFO:tensorflow:Performs basic checks...\r\nINFO:tensorflow:... name:bert_custom\r\nINFO:tensorflow:... vocab_file:vocab.txt\r\nINFO:tensorflow:... 
bert_config_file:bert_config.json\r\nINFO:tensorflow:... do_lower_case:True\r\nINFO:tensorflow:... max_seq_length:128\r\nINFO:tensorflow:Creating BLEURT scorer.\r\nINFO:tensorflow:Loading model...\r\nINFO:tensorflow:BLEURT initialized.\r\n---------------------------------------------------------------------------\r\nUnrecognizedFlagError Traceback (most recent call last)\r\n in \r\n 2 gen_text = \"I am walking on the promenade today\"\r\n 3 ref_text = \"I am walking along the promenade on this sunny day\"\r\n----> 4 bleurt.compute(predictions=[gen_text], references=[ref_text])\r\n\r\n~\/anaconda3\/envs\/noved\/lib\/python3.8\/site-packages\/datasets\/metric.py in compute(self, *args, **kwargs)\r\n 396 references = self.data[\"references\"]\r\n 397 with temp_seed(self.seed):\r\n--> 398 output = self._compute(predictions=predictions, references=references, **kwargs)\r\n 399 \r\n 400 if self.buf_writer is not None:\r\n\r\n~\/.cache\/huggingface\/modules\/datasets_modules\/metrics\/bleurt\/b1de33e1cbbcb1dbe276c887efa1fad68c6aff913885108078fa1ad408908778\/bleurt.py in _compute(self, predictions, references)\r\n 103 \r\n 104 def _compute(self, predictions, references):\r\n--> 105 scores = self.scorer.score(references=references, candidates=predictions)\r\n 106 return {\"scores\": scores}\r\n\r\n~\/anaconda3\/envs\/noved\/lib\/python3.8\/site-packages\/bleurt\/score.py in score(self, references, candidates, batch_size)\r\n 164 \"\"\"\r\n 165 if not batch_size:\r\n--> 166 batch_size = FLAGS.bleurt_batch_size\r\n 167 \r\n 168 candidates, references = list(candidates), list(references)\r\n\r\n~\/anaconda3\/envs\/noved\/lib\/python3.8\/site-packages\/tensorflow\/python\/platform\/flags.py in __getattr__(self, name)\r\n 83 # a flag.\r\n 84 if not wrapped.is_parsed():\r\n---> 85 wrapped(_sys.argv)\r\n 86 return wrapped.__getattr__(name)\r\n 87 \r\n\r\n~\/anaconda3\/envs\/noved\/lib\/python3.8\/site-packages\/absl\/flags\/_flagvalues.py in __call__(self, argv, known_only)\r\n 643 for name, value in unknown_flags:\r\n 644 suggestions = _helpers.get_flag_suggestions(name, list(self))\r\n--> 645 raise _exceptions.UnrecognizedFlagError(\r\n 646 name, value, suggestions=suggestions)\r\n 647 \r\n\r\nUnrecognizedFlagError: Unknown command line flag 'f'\r\n```\r\n\r\nPossible Fix:\r\nModify `_compute` method https:\/\/github.com\/huggingface\/datasets\/blob\/7e64851a12263dc74d41c668167918484c8000ab\/metrics\/bleurt\/bleurt.py#L104\r\nto receive a `batch_size` argument, for example:\r\n```\r\ndef _compute(self, predictions, references, batch_size=1):\r\n scores = self.scorer.score(references=references, candidates=predictions, batch_size=batch_size)\r\n return {\"scores\": scores}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1727\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1727\/timeline","performed_via_github_app":null} 
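The BLEURT failure above occurs when absl lazily parses `sys.argv` and hits an argument it does not recognise; the stray `'f'` flag is typical of notebook kernels. Here is a caller-side workaround sketch, distinct from the `_compute` change proposed in the issue: it assumes a Jupyter-style environment injecting `-f <connection file>` into `sys.argv` and uses the `datasets` 1.x metric API quoted in the traceback.

```python
import sys

from datasets import load_metric

# Workaround sketch (assumption: the unknown "-f" argument is injected by the
# notebook kernel). Dropping extra argv entries before absl parses them avoids
# the UnrecognizedFlagError without modifying the bleurt package.
sys.argv = sys.argv[:1]

bleurt = load_metric("bleurt")
result = bleurt.compute(
    predictions=["I am walking on the promenade today"],
    references=["I am walking along the promenade on this sunny day"],
)
print(result["scores"])
```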
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1726","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1726\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1726\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1726\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1726","id":784336370,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUzNTQ0ODg4","number":1726,"title":"Offline loading","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-01-12T15:21:57Z","updated_at":"2021-01-28T18:05:22Z","closed_at":"2021-01-19T16:42:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1726","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1726","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1726.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1726.patch","merged_at":"2021-01-19T16:42:32Z"},"body":"As discussed in #824 it would be cool to make the library work in offline mode.\r\nCurrently if there's not internet connection then modules (datasets or metrics) that have already been loaded in the past can't be loaded and it raises a ConnectionError.\r\nThis is because `prepare_module` fetches online for the latest version of the module.\r\n\r\nTo make it work in offline mode one suggestion was to reload the latest local version of the module.\r\nI implemented that and I also raise a warning saying that the module that is loaded is the latest local version.\r\n```python\r\nlogger.warning(\r\n f\"Using the latest cached version of the module from {cached_module_path} since it \"\r\n f\"couldn't be found locally at {input_path} or remotely ({error_type_that_prevented_reaching_out_remote_stuff}).\"\r\n)\r\n```\r\n\r\nI added tests to make sure it works as expected and I needed to do a few changes in the code to be able to test things properly. In particular I added a parameter `hf_modules_cache` to `init_dynamic_modules` for testing purposes. 
It makes it possible to have temporary module caches for testing.\r\n\r\nI also added an `offline` context utility that makes it possible to test parts of the code by making all the requests fail as if there were no internet.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1726\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1726\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1725","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1725\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1725\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1725\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1725","id":784182273,"node_id":"MDU6SXNzdWU3ODQxODIyNzM=","number":1725,"title":"load the local dataset","user":{"login":"xinjicong","id":41193842,"node_id":"MDQ6VXNlcjQxMTkzODQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/41193842?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/xinjicong","html_url":"https:\/\/github.com\/xinjicong","followers_url":"https:\/\/api.github.com\/users\/xinjicong\/followers","following_url":"https:\/\/api.github.com\/users\/xinjicong\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/xinjicong\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/xinjicong\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/xinjicong\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/xinjicong\/orgs","repos_url":"https:\/\/api.github.com\/users\/xinjicong\/repos","events_url":"https:\/\/api.github.com\/users\/xinjicong\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/xinjicong\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-01-12T12:12:55Z","updated_at":"2021-03-03T10:55:43Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Your guidebook's example is like\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset('json', data_files='my_file.json')\r\nbut the first arg is path...\r\nSo what should I do if I want to load a local dataset for model training?\r\nI will be grateful if you can help me handle this problem!\r\nThanks a lot!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1725\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1725\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1723","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1723\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1723\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1723\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1723","id":783982100,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUzMjQ4MzU1","number":1723,"title":"ADD S3 support for downloading and uploading processed datasets","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-12T07:17:34Z","updated_at":"2021-01-26T17:02:08Z","closed_at":"2021-01-26T17:02:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1723","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1723","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1723.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1723.patch","merged_at":"2021-01-26T17:02:07Z"},"body":"# What does this PR do?\r\n\r\nThis PR adds the functionality to load and save `datasets` from and to s3. \r\nYou can save `datasets` with either `Dataset.save_to_disk()` or `DatasetDict.save_to_disk`. \r\nYou can load `datasets` with either `load_from_disk` or `Dataset.load_from_disk()`, `DatasetDict.load_from_disk()`. \r\n\r\nLoading `csv` or `json` datasets from s3 is not implemented. \r\n\r\nTo save\/load datasets to s3 you either need to provide an `aws_profile`, which is set up on your machine, per default it uses the `default` profile or you have to pass an `aws_access_key_id` and `aws_secret_access_key`. \r\n\r\nThe implementation was done with the `fsspec` and `boto3`.\r\n\r\n\r\n### Example `aws_profile` :\r\n\r\n
\r\n\r\n```python\r\ndataset.save_to_disk(\"s3:\/\/moto-mock-s3-bucket\/datasets\/sdk\", aws_profile=\"hf-sm\")\r\n\r\nload_from_disk(\"s3:\/\/moto-mock-s3-bucket\/datasets\/sdk\", aws_profile=\"hf-sm\")\r\n```\r\n\r\n\r\n### Example `aws_access_key_id` and `aws_secret_access_key` :\r\n\r\n
\r\n\r\n```python\r\ndataset.save_to_disk(\"s3:\/\/moto-mock-s3-bucket\/datasets\/sdk\",\r\n aws_access_key_id=\"fake_access_key\", \r\n aws_secret_access_key=\"fake_secret_key\"\r\n )\r\n\r\nload_from_disk(\"s3:\/\/moto-mock-s3-bucket\/datasets\/sdk\",\r\n aws_access_key_id=\"fake_access_key\", \r\n aws_secret_access_key=\"fake_secret_key\"\r\n )\r\n```\r\n\r\nIf you want to load a dataset from a public s3 bucket you can pass `anon=True` \r\n\r\n### Example `anon=True` :\r\n\r\n
\r\n\r\n```python\r\ndataset.save_to_disk(\"s3:\/\/moto-mock-s3-bucket\/datasets\/sdk\", aws_profile=\"hf-sm\")\r\n\r\nload_from_disk(\"s3:\/\/moto-mock-s3-bucket\/datasets\/sdk\", anon=True)\r\n```\r\n\r\n### Full Example\r\n\r\n```python\r\nimport datasets\r\n\r\ndataset = datasets.load_dataset(\"imdb\")\r\nprint(f\"DatasetDict contains {len(dataset)} datasets\")\r\nprint(f\"train Dataset has the size of: {len(dataset['train'])}\")\r\n\r\ndataset.save_to_disk(\"s3:\/\/moto-mock-s3-bucket\/datasets\/sdk\", aws_profile=\"hf-sm\")\r\n\r\nremote_dataset = datasets.load_from_disk(\"s3:\/\/moto-mock-s3-bucket\/datasets\/sdk\", aws_profile=\"hf-sm\")\r\nprint(f\"DatasetDict contains {len(remote_dataset)} datasets\")\r\nprint(f\"train Dataset has the size of: {len(remote_dataset['train'])}\")\r\n```\r\n\r\nRelated to #878 \r\n\r\n\r\nI will also adjust the documentation once the code has been reviewed; until then I'll leave the PR in \"draft\" status. Something we could also consider is renaming the functions and maybe changing `_disk` to `_filesystem`. \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1723\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":3,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1723\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1724","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1724\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1724\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1724\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1724","id":784023338,"node_id":"MDU6SXNzdWU3ODQwMjMzMzg=","number":1724,"title":"could not run models on a offline server successfully","user":{"login":"lkcao","id":49967236,"node_id":"MDQ6VXNlcjQ5OTY3MjM2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49967236?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lkcao","html_url":"https:\/\/github.com\/lkcao","followers_url":"https:\/\/api.github.com\/users\/lkcao\/followers","following_url":"https:\/\/api.github.com\/users\/lkcao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lkcao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lkcao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lkcao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lkcao\/orgs","repos_url":"https:\/\/api.github.com\/users\/lkcao\/repos","events_url":"https:\/\/api.github.com\/users\/lkcao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lkcao\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-01-12T06:08:06Z","updated_at":"2021-03-03T15:32:29Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I really need your help with this.\r\nI am trying to fine-tune a RoBERTa model on a remote server which strictly bans internet access. I tried to install all the packages by hand and to run run_mlm.py on the server. 
It works well on colab, but when I try to run it on this offline server, it shows:\r\n![image](https:\/\/user-images.githubusercontent.com\/49967236\/104276256-25a88600-546a-11eb-9776-8ec695dfa24e.png)\r\n\r\nis there anything I can do? Is it possible to download all the things in cache and upload it to the server? Please help me out...","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1724\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1724\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1722","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1722\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1722\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1722\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1722","id":783921679,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUzMTk3MTg4","number":1722,"title":"Added unfiltered versions of the Wiki-Auto training data for the GEM simplification task.","user":{"login":"mounicam","id":11708999,"node_id":"MDQ6VXNlcjExNzA4OTk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11708999?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mounicam","html_url":"https:\/\/github.com\/mounicam","followers_url":"https:\/\/api.github.com\/users\/mounicam\/followers","following_url":"https:\/\/api.github.com\/users\/mounicam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mounicam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mounicam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mounicam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mounicam\/orgs","repos_url":"https:\/\/api.github.com\/users\/mounicam\/repos","events_url":"https:\/\/api.github.com\/users\/mounicam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mounicam\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-12T05:26:04Z","updated_at":"2021-01-12T18:14:53Z","closed_at":"2021-01-12T17:35:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1722","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1722","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1722.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1722.patch","merged_at":"2021-01-12T17:35:57Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1722\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1722\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1721","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1721\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1721\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1721\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1721","id":783828428,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUzMTIyODQ5","number":1721,"title":"[Scientific papers] Mirror datasets zip","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-01-12T01:15:40Z","updated_at":"2021-01-12T11:49:15Z","closed_at":"2021-01-12T11:41:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1721","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1721","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1721.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1721.patch","merged_at":"2021-01-12T11:41:47Z"},"body":"Datasets were uploading to https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/scientific_papers\/1.1.1\/arxiv-dataset.zip and https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/scientific_papers\/1.1.1\/pubmed-dataset.zip respectively to escape google drive quota and enable faster download. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1721\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1721\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1720","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1720\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1720\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1720\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1720","id":783721833,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUzMDM0MzYx","number":1720,"title":"Adding the NorNE dataset for NER","user":{"login":"versae","id":173537,"node_id":"MDQ6VXNlcjE3MzUzNw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/173537?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/versae","html_url":"https:\/\/github.com\/versae","followers_url":"https:\/\/api.github.com\/users\/versae\/followers","following_url":"https:\/\/api.github.com\/users\/versae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/versae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/versae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/versae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/versae\/orgs","repos_url":"https:\/\/api.github.com\/users\/versae\/repos","events_url":"https:\/\/api.github.com\/users\/versae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/versae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":13,"created_at":"2021-01-11T21:34:13Z","updated_at":"2021-03-31T14:23:49Z","closed_at":"2021-03-31T14:13:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1720","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1720","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1720.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1720.patch","merged_at":null},"body":"NorNE is a manually annotated corpus of named entities which extends the annotation of the existing Norwegian Dependency Treebank. 
Comprising both of the official standards of written Norwegian (Bokm\u00e5l and Nynorsk), the corpus contains around 600,000 tokens and annotates a rich set of entity types including persons, organizations, locations, geo-political entities, products, and events, in addition to a class corresponding to nominals derived from names.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1720\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1720\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1719","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1719\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1719\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1719\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1719","id":783557542,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUyODk3MzY4","number":1719,"title":"Fix column list comparison in transmit format","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-11T17:23:56Z","updated_at":"2021-01-11T18:45:03Z","closed_at":"2021-01-11T18:45:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1719","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1719","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1719.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1719.patch","merged_at":"2021-01-11T18:45:02Z"},"body":"As noticed in #1718 the cache might not reload the cache files when new columns were added.\r\nThis is because of an issue in `transmit_format` where the column list comparison fails because the order was not deterministic. 
This causes the `transmit_format` to apply an unnecessary `set_format` transform with shuffled column names.\r\n\r\nI fixed that by sorting the columns for the comparison and added a test.\r\n\r\nTo properly test that I added a third column `col_3` to the dummy_dataset used for tests.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1719\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1719\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1718","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1718\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1718\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1718\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1718","id":783474753,"node_id":"MDU6SXNzdWU3ODM0NzQ3NTM=","number":1718,"title":"Possible cache miss in datasets","user":{"login":"ofirzaf","id":18296312,"node_id":"MDQ6VXNlcjE4Mjk2MzEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18296312?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ofirzaf","html_url":"https:\/\/github.com\/ofirzaf","followers_url":"https:\/\/api.github.com\/users\/ofirzaf\/followers","following_url":"https:\/\/api.github.com\/users\/ofirzaf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ofirzaf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ofirzaf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ofirzaf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ofirzaf\/orgs","repos_url":"https:\/\/api.github.com\/users\/ofirzaf\/repos","events_url":"https:\/\/api.github.com\/users\/ofirzaf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ofirzaf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":14,"created_at":"2021-01-11T15:37:31Z","updated_at":"2021-04-28T06:35:23Z","closed_at":"2021-01-26T02:47:59Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI am using the datasets package and even though I run the same data processing functions, datasets always recomputes the function instead of using cache.\r\nI have attached an example script that for me reproduces the problem.\r\nIn the attached example the second map function always recomputes instead of loading from cache.\r\nIs this a bug or am I doing something wrong?\r\nIs there a way for fix this and avoid all the recomputation?\r\n\r\nThanks\r\n\r\nEdit:\r\ntransformers==3.5.1\r\ndatasets==1.2.0\r\n\r\n```\r\nfrom datasets import load_dataset\r\nfrom transformers import AutoTokenizer\r\n\r\ndatasets = load_dataset('wikitext', 'wikitext-103-raw-v1')\r\ntokenizer = AutoTokenizer.from_pretrained('bert-base-uncased', use_fast=True)\r\n\r\n\r\ncolumn_names = datasets[\"train\"].column_names\r\ntext_column_name = \"text\" if \"text\" in column_names else column_names[0]\r\ndef tokenize_function(examples):\r\n return tokenizer(examples[text_column_name], return_special_tokens_mask=True)\r\n\r\ntokenized_datasets = 
datasets.map(\r\n tokenize_function,\r\n batched=True,\r\n num_proc=60,\r\n remove_columns=[text_column_name],\r\n load_from_cache_file=True,\r\n)\r\nmax_seq_length = tokenizer.model_max_length\r\ndef group_texts(examples):\r\n # Concatenate all texts.\r\n concatenated_examples = {\r\n k: sum(examples[k], []) for k in examples.keys()}\r\n total_length = len(concatenated_examples[list(examples.keys())[0]])\r\n # We drop the small remainder, we could add padding if the model supported it instead of this drop, you can\r\n # customize this part to your needs.\r\n total_length = (total_length \/\/ max_seq_length) * max_seq_length\r\n # Split by chunks of max_len.\r\n result = {\r\n k: [t[i: i + max_seq_length]\r\n for i in range(0, total_length, max_seq_length)]\r\n for k, t in concatenated_examples.items()\r\n }\r\n return result\r\n\r\ntokenized_datasets = tokenized_datasets.map(\r\n group_texts,\r\n batched=True,\r\n num_proc=60,\r\n load_from_cache_file=True,\r\n)\r\nprint(tokenized_datasets)\r\n\r\nprint('finished')\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1718\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1718\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1717","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1717\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1717\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1717\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1717","id":783074255,"node_id":"MDU6SXNzdWU3ODMwNzQyNTU=","number":1717,"title":"SciFact dataset - minor changes","user":{"login":"dwadden","id":3091916,"node_id":"MDQ6VXNlcjMwOTE5MTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3091916?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dwadden","html_url":"https:\/\/github.com\/dwadden","followers_url":"https:\/\/api.github.com\/users\/dwadden\/followers","following_url":"https:\/\/api.github.com\/users\/dwadden\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dwadden\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dwadden\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dwadden\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dwadden\/orgs","repos_url":"https:\/\/api.github.com\/users\/dwadden\/repos","events_url":"https:\/\/api.github.com\/users\/dwadden\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dwadden\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-01-11T05:26:40Z","updated_at":"2021-01-26T02:52:17Z","closed_at":"2021-01-26T02:52:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nSciFact dataset creator here. First of all, thanks for adding the dataset to Huggingface, much appreciated!\r\n\r\nI'd like to make a few minor changes, including the citation information and the `_URL` from which to download the dataset. 
Can I submit a PR for this?\r\n\r\nIt also looks like the dataset is being downloaded directly from Huggingface's Google cloud account rather than via the `_URL` in [scifact.py](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/scifact\/scifact.py). Can you help me update the version on gcloud?\r\n\r\nThanks,\r\n\r\nDave","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1717\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1717\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1716","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1716\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1716\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1716\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1716","id":782819006,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUyMjgzNzE5","number":1716,"title":"Add Hatexplain Dataset","user":{"login":"kushal2000","id":48222101,"node_id":"MDQ6VXNlcjQ4MjIyMTAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48222101?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kushal2000","html_url":"https:\/\/github.com\/kushal2000","followers_url":"https:\/\/api.github.com\/users\/kushal2000\/followers","following_url":"https:\/\/api.github.com\/users\/kushal2000\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kushal2000\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kushal2000\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kushal2000\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kushal2000\/orgs","repos_url":"https:\/\/api.github.com\/users\/kushal2000\/repos","events_url":"https:\/\/api.github.com\/users\/kushal2000\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kushal2000\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-10T13:30:01Z","updated_at":"2021-01-18T14:21:42Z","closed_at":"2021-01-18T14:21:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1716","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1716","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1716.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1716.patch","merged_at":"2021-01-18T14:21:42Z"},"body":"Adding Hatexplain - the first benchmark hate speech dataset covering multiple aspects of the issue","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1716\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1716\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1715","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1715\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1715\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1715\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1715","id":782754441,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUyMjM2NDA5","number":1715,"title":"add Korean intonation-aided intention identification dataset","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-10T06:29:04Z","updated_at":"2021-09-17T16:54:13Z","closed_at":"2021-01-12T17:14:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1715","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1715","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1715.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1715.patch","merged_at":"2021-01-12T17:14:32Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1715\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1715\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1714","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1714\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1714\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1714\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1714","id":782416276,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUxOTc3MDA0","number":1714,"title":"Adding adversarialQA 
dataset","user":{"login":"maxbartolo","id":15869827,"node_id":"MDQ6VXNlcjE1ODY5ODI3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15869827?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxbartolo","html_url":"https:\/\/github.com\/maxbartolo","followers_url":"https:\/\/api.github.com\/users\/maxbartolo\/followers","following_url":"https:\/\/api.github.com\/users\/maxbartolo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxbartolo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxbartolo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxbartolo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxbartolo\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxbartolo\/repos","events_url":"https:\/\/api.github.com\/users\/maxbartolo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxbartolo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-01-08T21:46:09Z","updated_at":"2021-01-13T16:05:24Z","closed_at":"2021-01-13T16:05:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1714","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1714","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1714.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1714.patch","merged_at":"2021-01-13T16:05:24Z"},"body":"Adding the adversarialQA dataset (https:\/\/adversarialqa.github.io\/) from Beat the AI (https:\/\/arxiv.org\/abs\/2002.00293)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1714\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1714\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1713","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1713\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1713\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1713\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1713","id":782337723,"node_id":"MDU6SXNzdWU3ODIzMzc3MjM=","number":1713,"title":"Installation using 
conda","user":{"login":"pranav-s","id":9393002,"node_id":"MDQ6VXNlcjkzOTMwMDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9393002?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pranav-s","html_url":"https:\/\/github.com\/pranav-s","followers_url":"https:\/\/api.github.com\/users\/pranav-s\/followers","following_url":"https:\/\/api.github.com\/users\/pranav-s\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pranav-s\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pranav-s\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pranav-s\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pranav-s\/orgs","repos_url":"https:\/\/api.github.com\/users\/pranav-s\/repos","events_url":"https:\/\/api.github.com\/users\/pranav-s\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pranav-s\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-01-08T19:12:15Z","updated_at":"2021-09-17T12:47:40Z","closed_at":"2021-09-17T12:47:40Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Will a conda package for installing datasets be added to the huggingface conda channel? I have installed transformers using conda and would like to use the datasets library to use some of the scripts in the transformers\/examples folder but am unable to do so at the moment as datasets can only be installed using pip and using pip in a conda environment is generally a bad idea in my experience.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1713\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1713\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1712","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1712\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1712\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1712\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1712","id":782313097,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUxODkxMDk4","number":1712,"title":"Silicone","user":{"login":"eusip","id":1551356,"node_id":"MDQ6VXNlcjE1NTEzNTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1551356?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eusip","html_url":"https:\/\/github.com\/eusip","followers_url":"https:\/\/api.github.com\/users\/eusip\/followers","following_url":"https:\/\/api.github.com\/users\/eusip\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eusip\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eusip\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eusip\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eusip\/orgs","repos_url":"https:\/\/api.github.com\/users\/eusip\/repos","events_url":"https:\/\/api.github.com\/users\/eusip\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eusip\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-01-08T18:24:18Z","updated_at":"2021-01-21T14:12:37Z","closed_at":"2021-01-21T10:31:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1712","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1712","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1712.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1712.patch","merged_at":null},"body":"My collaborators and I within the Affective Computing team at Telecom Paris would like to push our spoken dialogue dataset for publication.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1712\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1712\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1711","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1711\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1711\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1711\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1711","id":782129083,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUxNzQxODA2","number":1711,"title":"Fix windows path scheme in cached 
path","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-08T13:45:56Z","updated_at":"2021-01-11T09:23:20Z","closed_at":"2021-01-11T09:23:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1711","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1711","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1711.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1711.patch","merged_at":"2021-01-11T09:23:19Z"},"body":"As noticed in #807 there's currently an issue with `cached_path` not raising `FileNotFoundError` on windows for absolute paths. This is due to the way we check for a path to be local or not. 
The check on the scheme using urlparse was incomplete.\r\n\r\nI fixed this and added tests","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1711\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1711\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1710","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1710\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1710\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1710\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1710","id":781914951,"node_id":"MDU6SXNzdWU3ODE5MTQ5NTE=","number":1710,"title":"IsADirectoryError when trying to download C4","user":{"login":"fredriko","id":5771366,"node_id":"MDQ6VXNlcjU3NzEzNjY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5771366?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/fredriko","html_url":"https:\/\/github.com\/fredriko","followers_url":"https:\/\/api.github.com\/users\/fredriko\/followers","following_url":"https:\/\/api.github.com\/users\/fredriko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/fredriko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/fredriko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/fredriko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/fredriko\/orgs","repos_url":"https:\/\/api.github.com\/users\/fredriko\/repos","events_url":"https:\/\/api.github.com\/users\/fredriko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/fredriko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-08T07:31:30Z","updated_at":"2021-01-13T09:44:13Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**TLDR**:\r\n\r\nI fail to download C4 and see a stacktrace originating in `IsADirectoryError` as an explanation for failure.\r\n\r\nHow can the problem be fixed? 
\r\n\r\n**VERBOSE**:\r\n\r\nI use Python version 3.7 and have the following dependencies listed in my project:\r\n\r\n```\r\ndatasets==1.2.0\r\napache-beam==2.26.0\r\n```\r\n\r\nWhen running the following code, where `\/data\/huggingface\/unpacked\/` contains a single unzipped `wet.paths` file manually downloaded as per the instructions for C4:\r\n\r\n```\r\nfrom datasets import load_dataset\r\n\r\nload_dataset(\"c4\", \"en\", data_dir=\"\/data\/huggingface\/unpacked\", beam_runner='DirectRunner')\r\n```\r\n\r\nI get the following stacktrace:\r\n\r\n```\r\n\/Users\/fredriko\/venv\/misc\/bin\/python \/Users\/fredriko\/source\/misc\/main.py\r\nDownloading and preparing dataset c4\/en (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/Users\/fredriko\/.cache\/huggingface\/datasets\/c4\/en\/2.3.0\/8304cf264cc42bdebcb13fca4b9cb36368a96f557d36f9dc969bebbe2568b283...\r\nTraceback (most recent call last):\r\n File \"\/Users\/fredriko\/source\/misc\/main.py\", line 3, in \r\n load_dataset(\"c4\", \"en\", data_dir=\"\/data\/huggingface\/unpacked\", beam_runner='DirectRunner')\r\n File \"\/Users\/fredriko\/venv\/misc\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 612, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/Users\/fredriko\/venv\/misc\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 527, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/Users\/fredriko\/venv\/misc\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 1066, in _download_and_prepare\r\n pipeline=pipeline,\r\n File \"\/Users\/fredriko\/venv\/misc\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 582, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/Users\/fredriko\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/c4\/8304cf264cc42bdebcb13fca4b9cb36368a96f557d36f9dc969bebbe2568b283\/c4.py\", line 190, in _split_generators\r\n file_paths = dl_manager.download_and_extract(files_to_download)\r\n File \"\/Users\/fredriko\/venv\/misc\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 258, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/Users\/fredriko\/venv\/misc\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 189, in download\r\n self._record_sizes_checksums(url_or_urls, downloaded_path_or_paths)\r\n File \"\/Users\/fredriko\/venv\/misc\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 117, in _record_sizes_checksums\r\n self._recorded_sizes_checksums[str(url)] = get_size_checksum_dict(path)\r\n File \"\/Users\/fredriko\/venv\/misc\/lib\/python3.7\/site-packages\/datasets\/utils\/info_utils.py\", line 80, in get_size_checksum_dict\r\n with open(path, \"rb\") as f:\r\nIsADirectoryError: [Errno 21] Is a directory: '\/'\r\n\r\nProcess finished with exit code 1\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1710\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1710\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1709","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1709\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1709\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1709\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1709","id":781875640,"node_id":"MDU6SXNzdWU3ODE4NzU2NDA=","number":1709,"title":"Databases","user":{"login":"JimmyJim1","id":68724553,"node_id":"MDQ6VXNlcjY4NzI0NTUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68724553?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JimmyJim1","html_url":"https:\/\/github.com\/JimmyJim1","followers_url":"https:\/\/api.github.com\/users\/JimmyJim1\/followers","following_url":"https:\/\/api.github.com\/users\/JimmyJim1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JimmyJim1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JimmyJim1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JimmyJim1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JimmyJim1\/orgs","repos_url":"https:\/\/api.github.com\/users\/JimmyJim1\/repos","events_url":"https:\/\/api.github.com\/users\/JimmyJim1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JimmyJim1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-08T06:14:03Z","updated_at":"2021-01-08T09:00:08Z","closed_at":"2021-01-08T09:00:08Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1709\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1709\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1708","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1708\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1708\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1708\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1708","id":781631455,"node_id":"MDU6SXNzdWU3ODE2MzE0NTU=","number":1708,"title":"","user":{"login":"Louiejay54","id":77126849,"node_id":"MDQ6VXNlcjc3MTI2ODQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/77126849?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Louiejay54","html_url":"https:\/\/github.com\/Louiejay54","followers_url":"https:\/\/api.github.com\/users\/Louiejay54\/followers","following_url":"https:\/\/api.github.com\/users\/Louiejay54\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Louiejay54\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Louiejay54\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Louiejay54\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Louiejay54\/orgs","repos_url":"https:\/\/api.github.com\/users\/Louiejay54\/repos","events_url":"https:\/\/api.github.com\/users\/Louiejay54\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Louiejay54\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-07T21:45:24Z","updated_at":"2021-01-08T09:00:01Z","closed_at":"2021-01-08T09:00:01Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1708\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1708\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1707","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1707\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1707\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1707\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1707","id":781507545,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUxMjE5MDk2","number":1707,"title":"Added generated READMEs for datasets that were missing 
one.","user":{"login":"madlag","id":272253,"node_id":"MDQ6VXNlcjI3MjI1Mw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/272253?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/madlag","html_url":"https:\/\/github.com\/madlag","followers_url":"https:\/\/api.github.com\/users\/madlag\/followers","following_url":"https:\/\/api.github.com\/users\/madlag\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/madlag\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/madlag\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/madlag\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/madlag\/orgs","repos_url":"https:\/\/api.github.com\/users\/madlag\/repos","events_url":"https:\/\/api.github.com\/users\/madlag\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/madlag\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-07T18:10:06Z","updated_at":"2021-01-18T14:32:33Z","closed_at":"2021-01-18T14:32:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1707","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1707","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1707.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1707.patch","merged_at":"2021-01-18T14:32:33Z"},"body":"This is it: we worked on a generator with Yacine @yjernite , and we generated dataset cards for all missing ones (161), with all the information we could gather from datasets repository, and using dummy_data to generate examples when possible.\r\n\r\nCode is available here for the moment: https:\/\/github.com\/madlag\/datasets_readme_generator .\r\nWe will move it to a Hugging Face repository and to https:\/\/huggingface.co\/datasets\/card-creator\/ later.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1707\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1707\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1706","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1706\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1706\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1706\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1706","id":781494476,"node_id":"MDU6SXNzdWU3ODE0OTQ0NzY=","number":1706,"title":"Error when downloading a large dataset on slow 
connection.","user":{"login":"lucadiliello","id":23355969,"node_id":"MDQ6VXNlcjIzMzU1OTY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23355969?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucadiliello","html_url":"https:\/\/github.com\/lucadiliello","followers_url":"https:\/\/api.github.com\/users\/lucadiliello\/followers","following_url":"https:\/\/api.github.com\/users\/lucadiliello\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucadiliello\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucadiliello\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucadiliello\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucadiliello\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucadiliello\/repos","events_url":"https:\/\/api.github.com\/users\/lucadiliello\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucadiliello\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-07T17:48:15Z","updated_at":"2021-01-13T10:35:02Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I receive the following error after about an hour trying to download the `openwebtext` dataset.\r\n\r\nThe code used is:\r\n```python\r\nimport datasets\r\ndatasets.load_dataset(\"openwebtext\")\r\n```\r\n\r\n> Traceback (most recent call last): [4\/28]\r\n> File \"\", line 1, in \r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 610, in load_dataset\r\n> ignore_verifications=ignore_verifications,\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 515, in download_and_prepare\r\n> dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 570, in _download_and_prepare\r\n> split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n> File \"\/home\/lucadiliello\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/openwebtext\/5c636399c7155da97c982d0d70ecdce30fbca66a4eb4fc768ad91f8331edac02\/openwebtext.py\", line 62, in _split_generators\r\n> dl_dir = dl_manager.download_and_extract(_URL)\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 254, in download_and_extract\r\n> return self.extract(self.download(url_or_urls))\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 235, in extract\r\n> num_proc=num_proc,\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n> return function(data_struct)\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 343, in cached_path\r\n> tar_file.extractall(output_path_extracted)\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/tarfile.py\", line 2000, in extractall\r\n> numeric_owner=numeric_owner)\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/tarfile.py\", line 2042, in extract\r\n> 
numeric_owner=numeric_owner)\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/tarfile.py\", line 2112, in _extract_member\r\n> self.makefile(tarinfo, targetpath)\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/tarfile.py\", line 2161, in makefile\r\n> copyfileobj(source, target, tarinfo.size, ReadError, bufsize)\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/tarfile.py\", line 253, in copyfileobj\r\n> buf = src.read(remainder)\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/lzma.py\", line 200, in read\r\n> return self._buffer.read(size)\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/_compression.py\", line 68, in readinto\r\n> data = self.read(len(byte_view))\r\n> File \"\/home\/lucadiliello\/anaconda3\/envs\/nlp\/lib\/python3.7\/_compression.py\", line 99, in read\r\n> raise EOFError(\"Compressed file ended before the \"\r\n> EOFError: Compressed file ended before the end-of-stream marker was reached\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1706\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1706\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1705","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1705\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1705\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1705\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1705","id":781474949,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUxMTkyMTc4","number":1705,"title":"Add information about caching and verifications in \"Load a Dataset\" docs","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to 
documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-07T17:18:44Z","updated_at":"2021-01-12T14:08:01Z","closed_at":"2021-01-12T14:08:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1705","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1705","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1705.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1705.patch","merged_at":"2021-01-12T14:08:01Z"},"body":"Related to #215.\r\n\r\nMissing improvements from @lhoestq's #1703.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1705\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1705\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1704","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1704\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1704\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1704\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1704","id":781402757,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUxMTMyNDI1","number":1704,"title":"Update XSUM Factuality DatasetCard","user":{"login":"vineeths96","id":50873201,"node_id":"MDQ6VXNlcjUwODczMjAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50873201?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vineeths96","html_url":"https:\/\/github.com\/vineeths96","followers_url":"https:\/\/api.github.com\/users\/vineeths96\/followers","following_url":"https:\/\/api.github.com\/users\/vineeths96\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vineeths96\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vineeths96\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vineeths96\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vineeths96\/orgs","repos_url":"https:\/\/api.github.com\/users\/vineeths96\/repos","events_url":"https:\/\/api.github.com\/users\/vineeths96\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vineeths96\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-07T15:37:14Z","updated_at":"2021-01-12T13:30:04Z","closed_at":"2021-01-12T13:30:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1704","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1704","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1704.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1704.patch","merged_at":"2021-01-12T13:30:04Z"},"body":"Update XSUM Factuality 
DatasetCard","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1704\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1704\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1703","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1703\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1703\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1703\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1703","id":781395146,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUxMTI2MjA5","number":1703,"title":"Improvements regarding caching and fingerprinting","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-01-07T15:26:29Z","updated_at":"2021-01-19T17:32:11Z","closed_at":"2021-01-19T17:32:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1703","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1703","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1703.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1703.patch","merged_at":"2021-01-19T17:32:10Z"},"body":"This PR adds these features:\r\n- Enable\/disable caching\r\n If disabled, the library will no longer reload cached datasets files when applying transforms to the datasets.\r\n It is equivalent to setting `load_from_cache` to `False` in dataset transforms.\r\n```python\r\nfrom datasets import set_caching_enabled\r\n\r\nset_caching_enabled(False)\r\n```\r\n- Allow unpicklable functions in `map`\r\n If an unpicklable function is used, then it's not possible to hash it to update the dataset fingerprint that is used to name cache files. To workaround that, a random fingerprint is generated instead and a warning is raised.\r\n```python\r\nlogger.warning(\r\n f\"Transform {transform} couldn't be hashed properly, a random hash was used instead. \"\r\n \"Make sure your transforms and parameters are serializable with pickle or dill for the dataset fingerprinting and caching to work. 
\"\r\n \"If you reuse this transform, the caching mechanism will consider it to be different from the previous calls and recompute everything.\"\r\n)\r\n```\r\n\r\nand also (open to discussion, EDIT: actually NOT included):\r\n- Enable\/disable fingerprinting\r\n Fingerprinting allows to have one deterministic fingerprint per dataset state.\r\n A dataset fingerprint is updated after each transform.\r\n Re-running the same transforms on a dataset in a different session results in the same fingerprint.\r\n Disabling the fingerprinting mechanism makes all the fingerprints random.\r\n Since the caching mechanism uses fingerprints to name the cache files, then cache file names will be different.\r\n Therefore disabling fingerprinting will prevent the caching mechanism from reloading datasets files that have already been computed.\r\n Disabling fingerprinting may speed up the lib for users that don't care about this feature and don't want to use caching.\r\n```python\r\nfrom datasets import set_fingerprinting_enabled\r\n\r\nset_fingerprinting_enabled(False)\r\n```\r\n\r\nOther details:\r\n- I renamed the `fingerprint` decorator to `fingerprint_transform` since the name was clearly not explicit. This decorator is used on dataset transform functions to allow them to update fingerprints.\r\n- I added some `ignore_kwargs` when decorating transforms with `fingerprint_transform`, to make the fingerprint update not sensible to kwargs like `load_from_cache` or `cache_file_name`.\r\n\r\nTodo: tests for set_fingerprinting_enabled + documentation for all the above features","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1703\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1703\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1702","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1702\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1702\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1702\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1702","id":781383277,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUxMTE2NDc0","number":1702,"title":"Fix importlib metdata import in 
py38","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-07T15:10:30Z","updated_at":"2021-01-08T10:47:15Z","closed_at":"2021-01-08T10:47:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1702","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1702","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1702.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1702.patch","merged_at":"2021-01-08T10:47:14Z"},"body":"In Python 3.8 there's no need to install `importlib_metadata` since it already exists as `importlib.metadata` in the standard lib.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1702\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1702\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1701","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1701\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1701\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1701\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1701","id":781345717,"node_id":"MDU6SXNzdWU3ODEzNDU3MTc=","number":1701,"title":"Some datasets miss dataset_infos.json or 
dummy_data.zip","user":{"login":"madlag","id":272253,"node_id":"MDQ6VXNlcjI3MjI1Mw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/272253?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/madlag","html_url":"https:\/\/github.com\/madlag","followers_url":"https:\/\/api.github.com\/users\/madlag\/followers","following_url":"https:\/\/api.github.com\/users\/madlag\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/madlag\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/madlag\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/madlag\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/madlag\/orgs","repos_url":"https:\/\/api.github.com\/users\/madlag\/repos","events_url":"https:\/\/api.github.com\/users\/madlag\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/madlag\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-07T14:17:13Z","updated_at":"2021-01-12T13:40:46Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"While working on dataset REAME generation script at https:\/\/github.com\/madlag\/datasets_readme_generator , I noticed that some datasets miss a dataset_infos.json : \r\n\r\n```\r\nc4\r\nlm1b\r\nreclor\r\nwikihow\r\n```\r\n\r\nAnd some does not have a dummy_data.zip : \r\n\r\n```\r\nkor_nli\r\nmath_dataset\r\nmlqa\r\nms_marco\r\nnewsgroup\r\nqa4mre\r\nqangaroo\r\nreddit_tifu\r\nsuper_glue\r\ntrivia_qa\r\nweb_of_science\r\nwmt14\r\nwmt15\r\nwmt16\r\nwmt17\r\nwmt18\r\nwmt19\r\nxtreme\r\n```\r\n\r\nBut it seems that some of those last do have a \"dummy\" directory .\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1701\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1701\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1700","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1700\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1700\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1700\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1700","id":781333589,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUxMDc1NTg2","number":1700,"title":"Update Curiosity dialogs 
DatasetCard","user":{"login":"vineeths96","id":50873201,"node_id":"MDQ6VXNlcjUwODczMjAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50873201?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vineeths96","html_url":"https:\/\/github.com\/vineeths96","followers_url":"https:\/\/api.github.com\/users\/vineeths96\/followers","following_url":"https:\/\/api.github.com\/users\/vineeths96\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vineeths96\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vineeths96\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vineeths96\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vineeths96\/orgs","repos_url":"https:\/\/api.github.com\/users\/vineeths96\/repos","events_url":"https:\/\/api.github.com\/users\/vineeths96\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vineeths96\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-07T13:59:27Z","updated_at":"2021-01-12T18:51:32Z","closed_at":"2021-01-12T18:51:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1700","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1700","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1700.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1700.patch","merged_at":"2021-01-12T18:51:32Z"},"body":"Update Curiosity dialogs DatasetCard\r\n\r\nThere are some entries in the data fields section yet to be filled. There is little information regarding those fields.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1700\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1700\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1699","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1699\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1699\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1699\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1699","id":781271558,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUxMDIzODE5","number":1699,"title":"Update DBRD dataset card and download 
URL","user":{"login":"benjaminvdb","id":8875786,"node_id":"MDQ6VXNlcjg4NzU3ODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8875786?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/benjaminvdb","html_url":"https:\/\/github.com\/benjaminvdb","followers_url":"https:\/\/api.github.com\/users\/benjaminvdb\/followers","following_url":"https:\/\/api.github.com\/users\/benjaminvdb\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/benjaminvdb\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/benjaminvdb\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/benjaminvdb\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/benjaminvdb\/orgs","repos_url":"https:\/\/api.github.com\/users\/benjaminvdb\/repos","events_url":"https:\/\/api.github.com\/users\/benjaminvdb\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/benjaminvdb\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-07T12:16:43Z","updated_at":"2021-01-07T13:41:39Z","closed_at":"2021-01-07T13:40:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1699","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1699","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1699.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1699.patch","merged_at":"2021-01-07T13:40:59Z"},"body":"I've added the Dutch Bood Review Dataset (DBRD) during the recent sprint. This pull request makes two minor changes:\r\n\r\n1. I'm changing the download URL from Google Drive to the dataset's GitHub release package. This is now possible because of PR #1316.\r\n2. I've updated the dataset card.\r\n\r\nCheers! 
\ud83d\ude04","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1699\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1699\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1698","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1698\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1698\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1698\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1698","id":781152561,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUwOTI0ODQ3","number":1698,"title":"Update Coached Conv Pref DatasetCard","user":{"login":"vineeths96","id":50873201,"node_id":"MDQ6VXNlcjUwODczMjAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50873201?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vineeths96","html_url":"https:\/\/github.com\/vineeths96","followers_url":"https:\/\/api.github.com\/users\/vineeths96\/followers","following_url":"https:\/\/api.github.com\/users\/vineeths96\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vineeths96\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vineeths96\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vineeths96\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vineeths96\/orgs","repos_url":"https:\/\/api.github.com\/users\/vineeths96\/repos","events_url":"https:\/\/api.github.com\/users\/vineeths96\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vineeths96\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-07T09:07:16Z","updated_at":"2021-01-08T17:04:33Z","closed_at":"2021-01-08T17:04:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1698","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1698","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1698.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1698.patch","merged_at":"2021-01-08T17:04:32Z"},"body":"Update Coached Conversation Preferance DatasetCard","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1698\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1698\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1697","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1697\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1697\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1697\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1697","id":781126579,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUwOTAzNzI5","number":1697,"title":"Update DialogRE DatasetCard","user":{"login":"vineeths96","id":50873201,"node_id":"MDQ6VXNlcjUwODczMjAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50873201?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vineeths96","html_url":"https:\/\/github.com\/vineeths96","followers_url":"https:\/\/api.github.com\/users\/vineeths96\/followers","following_url":"https:\/\/api.github.com\/users\/vineeths96\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vineeths96\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vineeths96\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vineeths96\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vineeths96\/orgs","repos_url":"https:\/\/api.github.com\/users\/vineeths96\/repos","events_url":"https:\/\/api.github.com\/users\/vineeths96\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vineeths96\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-07T08:22:33Z","updated_at":"2021-01-07T13:34:28Z","closed_at":"2021-01-07T13:34:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1697","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1697","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1697.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1697.patch","merged_at":"2021-01-07T13:34:28Z"},"body":"Update the information in the dataset card for the Dialog RE dataset. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1697\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1697\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1696","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1696\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1696\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1696\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1696","id":781096918,"node_id":"MDU6SXNzdWU3ODEwOTY5MTg=","number":1696,"title":"Unable to install datasets","user":{"login":"glee2429","id":12635475,"node_id":"MDQ6VXNlcjEyNjM1NDc1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12635475?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/glee2429","html_url":"https:\/\/github.com\/glee2429","followers_url":"https:\/\/api.github.com\/users\/glee2429\/followers","following_url":"https:\/\/api.github.com\/users\/glee2429\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/glee2429\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/glee2429\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/glee2429\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/glee2429\/orgs","repos_url":"https:\/\/api.github.com\/users\/glee2429\/repos","events_url":"https:\/\/api.github.com\/users\/glee2429\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/glee2429\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-01-07T07:24:37Z","updated_at":"2021-01-08T00:33:05Z","closed_at":"2021-01-07T22:06:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"** Edit **\r\nI believe there's a bug with the package when you're installing it with Python 3.9. I recommend sticking with previous versions. Thanks, @thomwolf for the insight! \r\n\r\n**Short description**\r\n\r\nI followed the instructions for installing datasets (https:\/\/huggingface.co\/docs\/datasets\/installation.html). However, while I tried to download datasets using `pip install datasets` I got a massive error message after getting stuck at \"Installing build dependencies...\" \r\n\r\nI was wondering if this problem can be fixed by creating a virtual environment, but it didn't help. Can anyone offer some advice on how to fix this issue? 
\r\n\r\nHere's an error message: \r\n\r\n`(env) Gas-MacBook-Pro:Downloads destiny$ pip install datasets\r\nCollecting datasets\r\n Using cached datasets-1.2.0-py3-none-any.whl (159 kB)\r\nCollecting numpy>=1.17\r\n Using cached numpy-1.19.5-cp39-cp39-macosx_10_9_x86_64.whl (15.6 MB)\r\nCollecting pyarrow>=0.17.1\r\n Using cached pyarrow-2.0.0.tar.gz (58.9 MB)\r\n....\r\n\r\n _configtest.c:9:5: warning: incompatible redeclaration of library function 'ceilf' [-Wincompatible-library-redeclaration]\r\n int ceilf (void);\r\n ^\r\n _configtest.c:9:5: note: 'ceilf' is a builtin with type 'float (float)'\r\n _configtest.c:10:5: warning: incompatible redeclaration of library function 'rintf' [-Wincompatible-library-redeclaration]\r\n int rintf (void);\r\n ^\r\n _configtest.c:10:5: note: 'rintf' is a builtin with type 'float (float)'\r\n _configtest.c:11:5: warning: incompatible redeclaration of library function 'truncf' [-Wincompatible-library-redeclaration]\r\n int truncf (void);\r\n ^\r\n _configtest.c:11:5: note: 'truncf' is a builtin with type 'float (float)'\r\n _configtest.c:12:5: warning: incompatible redeclaration of library function 'sqrtf' [-Wincompatible-library-redeclaration]\r\n int sqrtf (void);\r\n ^\r\n _configtest.c:12:5: note: 'sqrtf' is a builtin with type 'float (float)'\r\n _configtest.c:13:5: warning: incompatible redeclaration of library function 'log10f' [-Wincompatible-library-redeclaration]\r\n int log10f (void);\r\n ^\r\n _configtest.c:13:5: note: 'log10f' is a builtin with type 'float (float)'\r\n _configtest.c:14:5: warning: incompatible redeclaration of library function 'logf' [-Wincompatible-library-redeclaration]\r\n int logf (void);\r\n ^\r\n _configtest.c:14:5: note: 'logf' is a builtin with type 'float (float)'\r\n _configtest.c:15:5: warning: incompatible redeclaration of library function 'log1pf' [-Wincompatible-library-redeclaration]\r\n int log1pf (void);\r\n ^\r\n _configtest.c:15:5: note: 'log1pf' is a builtin with type 'float (float)'\r\n _configtest.c:16:5: warning: incompatible redeclaration of library function 'expf' [-Wincompatible-library-redeclaration]\r\n int expf (void);\r\n ^\r\n _configtest.c:16:5: note: 'expf' is a builtin with type 'float (float)'\r\n _configtest.c:17:5: warning: incompatible redeclaration of library function 'expm1f' [-Wincompatible-library-redeclaration]\r\n int expm1f (void);\r\n ^\r\n _configtest.c:17:5: note: 'expm1f' is a builtin with type 'float (float)'\r\n _configtest.c:18:5: warning: incompatible redeclaration of library function 'asinf' [-Wincompatible-library-redeclaration]\r\n int asinf (void);\r\n ^\r\n _configtest.c:18:5: note: 'asinf' is a builtin with type 'float (float)'\r\n _configtest.c:19:5: warning: incompatible redeclaration of library function 'acosf' [-Wincompatible-library-redeclaration]\r\n int acosf (void);\r\n ^\r\n _configtest.c:19:5: note: 'acosf' is a builtin with type 'float (float)'\r\n _configtest.c:20:5: warning: incompatible redeclaration of library function 'atanf' [-Wincompatible-library-redeclaration]\r\n int atanf (void);\r\n ^\r\n _configtest.c:20:5: note: 'atanf' is a builtin with type 'float (float)'\r\n _configtest.c:21:5: warning: incompatible redeclaration of library function 'asinhf' [-Wincompatible-library-redeclaration]\r\n int asinhf (void);\r\n ^\r\n _configtest.c:21:5: note: 'asinhf' is a builtin with type 'float (float)'\r\n _configtest.c:22:5: warning: incompatible redeclaration of library function 'acoshf' [-Wincompatible-library-redeclaration]\r\n int acoshf (void);\r\n 
^\r\n _configtest.c:22:5: note: 'acoshf' is a builtin with type 'float (float)'\r\n _configtest.c:23:5: warning: incompatible redeclaration of library function 'atanhf' [-Wincompatible-library-redeclaration]\r\n int atanhf (void);\r\n ^\r\n _configtest.c:23:5: note: 'atanhf' is a builtin with type 'float (float)'\r\n _configtest.c:24:5: warning: incompatible redeclaration of library function 'hypotf' [-Wincompatible-library-redeclaration]\r\n int hypotf (void);\r\n ^\r\n _configtest.c:24:5: note: 'hypotf' is a builtin with type 'float (float, float)'\r\n _configtest.c:25:5: warning: incompatible redeclaration of library function 'atan2f' [-Wincompatible-library-redeclaration]\r\n int atan2f (void);\r\n ^\r\n _configtest.c:25:5: note: 'atan2f' is a builtin with type 'float (float, float)'\r\n _configtest.c:26:5: warning: incompatible redeclaration of library function 'powf' [-Wincompatible-library-redeclaration]\r\n int powf (void);\r\n ^\r\n _configtest.c:26:5: note: 'powf' is a builtin with type 'float (float, float)'\r\n _configtest.c:27:5: warning: incompatible redeclaration of library function 'fmodf' [-Wincompatible-library-redeclaration]\r\n int fmodf (void);\r\n ^\r\n _configtest.c:27:5: note: 'fmodf' is a builtin with type 'float (float, float)'\r\n _configtest.c:28:5: warning: incompatible redeclaration of library function 'modff' [-Wincompatible-library-redeclaration]\r\n int modff (void);\r\n ^\r\n _configtest.c:28:5: note: 'modff' is a builtin with type 'float (float, float *)'\r\n _configtest.c:29:5: warning: incompatible redeclaration of library function 'frexpf' [-Wincompatible-library-redeclaration]\r\n int frexpf (void);\r\n ^\r\n _configtest.c:29:5: note: 'frexpf' is a builtin with type 'float (float, int *)'\r\n _configtest.c:30:5: warning: incompatible redeclaration of library function 'ldexpf' [-Wincompatible-library-redeclaration]\r\n int ldexpf (void);\r\n ^\r\n _configtest.c:30:5: note: 'ldexpf' is a builtin with type 'float (float, int)'\r\n _configtest.c:31:5: warning: incompatible redeclaration of library function 'exp2f' [-Wincompatible-library-redeclaration]\r\n int exp2f (void);\r\n ^\r\n _configtest.c:31:5: note: 'exp2f' is a builtin with type 'float (float)'\r\n _configtest.c:32:5: warning: incompatible redeclaration of library function 'log2f' [-Wincompatible-library-redeclaration]\r\n int log2f (void);\r\n ^\r\n _configtest.c:32:5: note: 'log2f' is a builtin with type 'float (float)'\r\n _configtest.c:33:5: warning: incompatible redeclaration of library function 'copysignf' [-Wincompatible-library-redeclaration]\r\n int copysignf (void);\r\n ^\r\n _configtest.c:33:5: note: 'copysignf' is a builtin with type 'float (float, float)'\r\n _configtest.c:34:5: warning: incompatible redeclaration of library function 'nextafterf' [-Wincompatible-library-redeclaration]\r\n int nextafterf (void);\r\n ^\r\n _configtest.c:34:5: note: 'nextafterf' is a builtin with type 'float (float, float)'\r\n _configtest.c:35:5: warning: incompatible redeclaration of library function 'cbrtf' [-Wincompatible-library-redeclaration]\r\n int cbrtf (void);\r\n ^\r\n _configtest.c:35:5: note: 'cbrtf' is a builtin with type 'float (float)'\r\n 35 warnings generated.\r\n clang _configtest.o -o _configtest\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d _configtest\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk 
-I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n _configtest.c:1:5: warning: incompatible redeclaration of library function 'sinl' [-Wincompatible-library-redeclaration]\r\n int sinl (void);\r\n ^\r\n _configtest.c:1:5: note: 'sinl' is a builtin with type 'long double (long double)'\r\n _configtest.c:2:5: warning: incompatible redeclaration of library function 'cosl' [-Wincompatible-library-redeclaration]\r\n int cosl (void);\r\n ^\r\n _configtest.c:2:5: note: 'cosl' is a builtin with type 'long double (long double)'\r\n _configtest.c:3:5: warning: incompatible redeclaration of library function 'tanl' [-Wincompatible-library-redeclaration]\r\n int tanl (void);\r\n ^\r\n _configtest.c:3:5: note: 'tanl' is a builtin with type 'long double (long double)'\r\n _configtest.c:4:5: warning: incompatible redeclaration of library function 'sinhl' [-Wincompatible-library-redeclaration]\r\n int sinhl (void);\r\n ^\r\n _configtest.c:4:5: note: 'sinhl' is a builtin with type 'long double (long double)'\r\n _configtest.c:5:5: warning: incompatible redeclaration of library function 'coshl' [-Wincompatible-library-redeclaration]\r\n int coshl (void);\r\n ^\r\n _configtest.c:5:5: note: 'coshl' is a builtin with type 'long double (long double)'\r\n _configtest.c:6:5: warning: incompatible redeclaration of library function 'tanhl' [-Wincompatible-library-redeclaration]\r\n int tanhl (void);\r\n ^\r\n _configtest.c:6:5: note: 'tanhl' is a builtin with type 'long double (long double)'\r\n _configtest.c:7:5: warning: incompatible redeclaration of library function 'fabsl' [-Wincompatible-library-redeclaration]\r\n int fabsl (void);\r\n ^\r\n _configtest.c:7:5: note: 'fabsl' is a builtin with type 'long double (long double)'\r\n _configtest.c:8:5: warning: incompatible redeclaration of library function 'floorl' [-Wincompatible-library-redeclaration]\r\n int floorl (void);\r\n ^\r\n _configtest.c:8:5: note: 'floorl' is a builtin with type 'long double (long double)'\r\n _configtest.c:9:5: warning: incompatible redeclaration of library function 'ceill' [-Wincompatible-library-redeclaration]\r\n int ceill (void);\r\n ^\r\n _configtest.c:9:5: note: 'ceill' is a builtin with type 'long double (long double)'\r\n _configtest.c:10:5: warning: incompatible redeclaration of library function 'rintl' [-Wincompatible-library-redeclaration]\r\n int rintl (void);\r\n ^\r\n _configtest.c:10:5: note: 'rintl' is a builtin with type 'long double (long double)'\r\n _configtest.c:11:5: warning: incompatible redeclaration of library function 'truncl' [-Wincompatible-library-redeclaration]\r\n int truncl (void);\r\n ^\r\n _configtest.c:11:5: note: 'truncl' is a builtin with type 'long double (long double)'\r\n _configtest.c:12:5: warning: incompatible redeclaration of library function 'sqrtl' [-Wincompatible-library-redeclaration]\r\n int sqrtl (void);\r\n ^\r\n _configtest.c:12:5: note: 'sqrtl' is a 
builtin with type 'long double (long double)'\r\n _configtest.c:13:5: warning: incompatible redeclaration of library function 'log10l' [-Wincompatible-library-redeclaration]\r\n int log10l (void);\r\n ^\r\n _configtest.c:13:5: note: 'log10l' is a builtin with type 'long double (long double)'\r\n _configtest.c:14:5: warning: incompatible redeclaration of library function 'logl' [-Wincompatible-library-redeclaration]\r\n int logl (void);\r\n ^\r\n _configtest.c:14:5: note: 'logl' is a builtin with type 'long double (long double)'\r\n _configtest.c:15:5: warning: incompatible redeclaration of library function 'log1pl' [-Wincompatible-library-redeclaration]\r\n int log1pl (void);\r\n ^\r\n _configtest.c:15:5: note: 'log1pl' is a builtin with type 'long double (long double)'\r\n _configtest.c:16:5: warning: incompatible redeclaration of library function 'expl' [-Wincompatible-library-redeclaration]\r\n int expl (void);\r\n ^\r\n _configtest.c:16:5: note: 'expl' is a builtin with type 'long double (long double)'\r\n _configtest.c:17:5: warning: incompatible redeclaration of library function 'expm1l' [-Wincompatible-library-redeclaration]\r\n int expm1l (void);\r\n ^\r\n _configtest.c:17:5: note: 'expm1l' is a builtin with type 'long double (long double)'\r\n _configtest.c:18:5: warning: incompatible redeclaration of library function 'asinl' [-Wincompatible-library-redeclaration]\r\n int asinl (void);\r\n ^\r\n _configtest.c:18:5: note: 'asinl' is a builtin with type 'long double (long double)'\r\n _configtest.c:19:5: warning: incompatible redeclaration of library function 'acosl' [-Wincompatible-library-redeclaration]\r\n int acosl (void);\r\n ^\r\n _configtest.c:19:5: note: 'acosl' is a builtin with type 'long double (long double)'\r\n _configtest.c:20:5: warning: incompatible redeclaration of library function 'atanl' [-Wincompatible-library-redeclaration]\r\n int atanl (void);\r\n ^\r\n _configtest.c:20:5: note: 'atanl' is a builtin with type 'long double (long double)'\r\n _configtest.c:21:5: warning: incompatible redeclaration of library function 'asinhl' [-Wincompatible-library-redeclaration]\r\n int asinhl (void);\r\n ^\r\n _configtest.c:21:5: note: 'asinhl' is a builtin with type 'long double (long double)'\r\n _configtest.c:22:5: warning: incompatible redeclaration of library function 'acoshl' [-Wincompatible-library-redeclaration]\r\n int acoshl (void);\r\n ^\r\n _configtest.c:22:5: note: 'acoshl' is a builtin with type 'long double (long double)'\r\n _configtest.c:23:5: warning: incompatible redeclaration of library function 'atanhl' [-Wincompatible-library-redeclaration]\r\n int atanhl (void);\r\n ^\r\n _configtest.c:23:5: note: 'atanhl' is a builtin with type 'long double (long double)'\r\n _configtest.c:24:5: warning: incompatible redeclaration of library function 'hypotl' [-Wincompatible-library-redeclaration]\r\n int hypotl (void);\r\n ^\r\n _configtest.c:24:5: note: 'hypotl' is a builtin with type 'long double (long double, long double)'\r\n _configtest.c:25:5: warning: incompatible redeclaration of library function 'atan2l' [-Wincompatible-library-redeclaration]\r\n int atan2l (void);\r\n ^\r\n _configtest.c:25:5: note: 'atan2l' is a builtin with type 'long double (long double, long double)'\r\n _configtest.c:26:5: warning: incompatible redeclaration of library function 'powl' [-Wincompatible-library-redeclaration]\r\n int powl (void);\r\n ^\r\n _configtest.c:26:5: note: 'powl' is a builtin with type 'long double (long double, long double)'\r\n _configtest.c:27:5: warning: 
incompatible redeclaration of library function 'fmodl' [-Wincompatible-library-redeclaration]\r\n int fmodl (void);\r\n ^\r\n _configtest.c:27:5: note: 'fmodl' is a builtin with type 'long double (long double, long double)'\r\n _configtest.c:28:5: warning: incompatible redeclaration of library function 'modfl' [-Wincompatible-library-redeclaration]\r\n int modfl (void);\r\n ^\r\n _configtest.c:28:5: note: 'modfl' is a builtin with type 'long double (long double, long double *)'\r\n _configtest.c:29:5: warning: incompatible redeclaration of library function 'frexpl' [-Wincompatible-library-redeclaration]\r\n int frexpl (void);\r\n ^\r\n _configtest.c:29:5: note: 'frexpl' is a builtin with type 'long double (long double, int *)'\r\n _configtest.c:30:5: warning: incompatible redeclaration of library function 'ldexpl' [-Wincompatible-library-redeclaration]\r\n int ldexpl (void);\r\n ^\r\n _configtest.c:30:5: note: 'ldexpl' is a builtin with type 'long double (long double, int)'\r\n _configtest.c:31:5: warning: incompatible redeclaration of library function 'exp2l' [-Wincompatible-library-redeclaration]\r\n int exp2l (void);\r\n ^\r\n _configtest.c:31:5: note: 'exp2l' is a builtin with type 'long double (long double)'\r\n _configtest.c:32:5: warning: incompatible redeclaration of library function 'log2l' [-Wincompatible-library-redeclaration]\r\n int log2l (void);\r\n ^\r\n _configtest.c:32:5: note: 'log2l' is a builtin with type 'long double (long double)'\r\n _configtest.c:33:5: warning: incompatible redeclaration of library function 'copysignl' [-Wincompatible-library-redeclaration]\r\n int copysignl (void);\r\n ^\r\n _configtest.c:33:5: note: 'copysignl' is a builtin with type 'long double (long double, long double)'\r\n _configtest.c:34:5: warning: incompatible redeclaration of library function 'nextafterl' [-Wincompatible-library-redeclaration]\r\n int nextafterl (void);\r\n ^\r\n _configtest.c:34:5: note: 'nextafterl' is a builtin with type 'long double (long double, long double)'\r\n _configtest.c:35:5: warning: incompatible redeclaration of library function 'cbrtl' [-Wincompatible-library-redeclaration]\r\n int cbrtl (void);\r\n ^\r\n _configtest.c:35:5: note: 'cbrtl' is a builtin with type 'long double (long double)'\r\n 35 warnings generated.\r\n clang _configtest.o -o _configtest\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d _configtest\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot 
\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include 
-I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n _configtest.c:8:12: error: use of undeclared identifier 'HAVE_DECL_SIGNBIT'\r\n (void) HAVE_DECL_SIGNBIT;\r\n ^\r\n 1 error generated.\r\n failure.\r\n removing: _configtest.c _configtest.o\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include 
-I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: 
clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n _configtest.c:1:5: warning: incompatible redeclaration of library function 'cabs' [-Wincompatible-library-redeclaration]\r\n int cabs (void);\r\n ^\r\n _configtest.c:1:5: note: 'cabs' is a builtin with type 'double (_Complex double)'\r\n _configtest.c:2:5: warning: incompatible redeclaration of library function 'cacos' [-Wincompatible-library-redeclaration]\r\n int cacos (void);\r\n ^\r\n _configtest.c:2:5: note: 'cacos' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:3:5: warning: incompatible redeclaration of library function 'cacosh' [-Wincompatible-library-redeclaration]\r\n int cacosh (void);\r\n ^\r\n _configtest.c:3:5: note: 'cacosh' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:4:5: warning: incompatible redeclaration of library function 'carg' [-Wincompatible-library-redeclaration]\r\n int carg (void);\r\n ^\r\n _configtest.c:4:5: note: 'carg' is a builtin with type 'double (_Complex double)'\r\n _configtest.c:5:5: warning: incompatible redeclaration of library function 'casin' [-Wincompatible-library-redeclaration]\r\n int casin (void);\r\n ^\r\n _configtest.c:5:5: note: 'casin' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:6:5: warning: incompatible redeclaration of library function 'casinh' [-Wincompatible-library-redeclaration]\r\n int casinh (void);\r\n ^\r\n _configtest.c:6:5: note: 'casinh' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:7:5: warning: incompatible redeclaration of library function 'catan' [-Wincompatible-library-redeclaration]\r\n int catan (void);\r\n ^\r\n _configtest.c:7:5: note: 'catan' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:8:5: warning: incompatible redeclaration of library function 'catanh' [-Wincompatible-library-redeclaration]\r\n int catanh (void);\r\n ^\r\n _configtest.c:8:5: note: 'catanh' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:9:5: warning: incompatible redeclaration of library function 'ccos' [-Wincompatible-library-redeclaration]\r\n int ccos (void);\r\n ^\r\n _configtest.c:9:5: note: 'ccos' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:10:5: warning: incompatible redeclaration of library function 'ccosh' [-Wincompatible-library-redeclaration]\r\n int ccosh (void);\r\n ^\r\n _configtest.c:10:5: note: 'ccosh' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:11:5: warning: incompatible redeclaration of library function 'cexp' [-Wincompatible-library-redeclaration]\r\n int cexp (void);\r\n ^\r\n _configtest.c:11:5: note: 'cexp' is a 
builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:12:5: warning: incompatible redeclaration of library function 'cimag' [-Wincompatible-library-redeclaration]\r\n int cimag (void);\r\n ^\r\n _configtest.c:12:5: note: 'cimag' is a builtin with type 'double (_Complex double)'\r\n _configtest.c:13:5: warning: incompatible redeclaration of library function 'clog' [-Wincompatible-library-redeclaration]\r\n int clog (void);\r\n ^\r\n _configtest.c:13:5: note: 'clog' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:14:5: warning: incompatible redeclaration of library function 'conj' [-Wincompatible-library-redeclaration]\r\n int conj (void);\r\n ^\r\n _configtest.c:14:5: note: 'conj' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:15:5: warning: incompatible redeclaration of library function 'cpow' [-Wincompatible-library-redeclaration]\r\n int cpow (void);\r\n ^\r\n _configtest.c:15:5: note: 'cpow' is a builtin with type '_Complex double (_Complex double, _Complex double)'\r\n _configtest.c:16:5: warning: incompatible redeclaration of library function 'cproj' [-Wincompatible-library-redeclaration]\r\n int cproj (void);\r\n ^\r\n _configtest.c:16:5: note: 'cproj' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:17:5: warning: incompatible redeclaration of library function 'creal' [-Wincompatible-library-redeclaration]\r\n int creal (void);\r\n ^\r\n _configtest.c:17:5: note: 'creal' is a builtin with type 'double (_Complex double)'\r\n _configtest.c:18:5: warning: incompatible redeclaration of library function 'csin' [-Wincompatible-library-redeclaration]\r\n int csin (void);\r\n ^\r\n _configtest.c:18:5: note: 'csin' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:19:5: warning: incompatible redeclaration of library function 'csinh' [-Wincompatible-library-redeclaration]\r\n int csinh (void);\r\n ^\r\n _configtest.c:19:5: note: 'csinh' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:20:5: warning: incompatible redeclaration of library function 'csqrt' [-Wincompatible-library-redeclaration]\r\n int csqrt (void);\r\n ^\r\n _configtest.c:20:5: note: 'csqrt' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:21:5: warning: incompatible redeclaration of library function 'ctan' [-Wincompatible-library-redeclaration]\r\n int ctan (void);\r\n ^\r\n _configtest.c:21:5: note: 'ctan' is a builtin with type '_Complex double (_Complex double)'\r\n _configtest.c:22:5: warning: incompatible redeclaration of library function 'ctanh' [-Wincompatible-library-redeclaration]\r\n int ctanh (void);\r\n ^\r\n _configtest.c:22:5: note: 'ctanh' is a builtin with type '_Complex double (_Complex double)'\r\n 22 warnings generated.\r\n clang _configtest.o -o _configtest\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d _configtest\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath 
-Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n _configtest.c:1:5: warning: incompatible redeclaration of library function 'cabsf' [-Wincompatible-library-redeclaration]\r\n int cabsf (void);\r\n ^\r\n _configtest.c:1:5: note: 'cabsf' is a builtin with type 'float (_Complex float)'\r\n _configtest.c:2:5: warning: incompatible redeclaration of library function 'cacosf' [-Wincompatible-library-redeclaration]\r\n int cacosf (void);\r\n ^\r\n _configtest.c:2:5: note: 'cacosf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:3:5: warning: incompatible redeclaration of library function 'cacoshf' [-Wincompatible-library-redeclaration]\r\n int cacoshf (void);\r\n ^\r\n _configtest.c:3:5: note: 'cacoshf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:4:5: warning: incompatible redeclaration of library function 'cargf' [-Wincompatible-library-redeclaration]\r\n int cargf (void);\r\n ^\r\n _configtest.c:4:5: note: 'cargf' is a builtin with type 'float (_Complex float)'\r\n _configtest.c:5:5: warning: incompatible redeclaration of library function 'casinf' [-Wincompatible-library-redeclaration]\r\n int casinf (void);\r\n ^\r\n _configtest.c:5:5: note: 'casinf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:6:5: warning: incompatible redeclaration of library function 'casinhf' [-Wincompatible-library-redeclaration]\r\n int casinhf (void);\r\n ^\r\n _configtest.c:6:5: note: 'casinhf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:7:5: warning: incompatible redeclaration of library function 'catanf' [-Wincompatible-library-redeclaration]\r\n int catanf (void);\r\n ^\r\n _configtest.c:7:5: note: 'catanf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:8:5: warning: incompatible redeclaration of library function 'catanhf' [-Wincompatible-library-redeclaration]\r\n int catanhf (void);\r\n ^\r\n _configtest.c:8:5: note: 'catanhf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:9:5: warning: incompatible redeclaration of library function 'ccosf' [-Wincompatible-library-redeclaration]\r\n int ccosf (void);\r\n ^\r\n _configtest.c:9:5: note: 'ccosf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:10:5: warning: incompatible redeclaration of library function 'ccoshf' [-Wincompatible-library-redeclaration]\r\n int ccoshf (void);\r\n ^\r\n _configtest.c:10:5: note: 'ccoshf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:11:5: warning: incompatible redeclaration of library function 'cexpf' [-Wincompatible-library-redeclaration]\r\n int cexpf (void);\r\n ^\r\n _configtest.c:11:5: note: 'cexpf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:12:5: warning: incompatible redeclaration of library function 'cimagf' [-Wincompatible-library-redeclaration]\r\n int cimagf (void);\r\n ^\r\n _configtest.c:12:5: note: 'cimagf' is a builtin with type 'float (_Complex float)'\r\n _configtest.c:13:5: warning: incompatible redeclaration of library function 'clogf' [-Wincompatible-library-redeclaration]\r\n int clogf (void);\r\n ^\r\n _configtest.c:13:5: note: 'clogf' is a builtin with type '_Complex float (_Complex float)'\r\n 
_configtest.c:14:5: warning: incompatible redeclaration of library function 'conjf' [-Wincompatible-library-redeclaration]\r\n int conjf (void);\r\n ^\r\n _configtest.c:14:5: note: 'conjf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:15:5: warning: incompatible redeclaration of library function 'cpowf' [-Wincompatible-library-redeclaration]\r\n int cpowf (void);\r\n ^\r\n _configtest.c:15:5: note: 'cpowf' is a builtin with type '_Complex float (_Complex float, _Complex float)'\r\n _configtest.c:16:5: warning: incompatible redeclaration of library function 'cprojf' [-Wincompatible-library-redeclaration]\r\n int cprojf (void);\r\n ^\r\n _configtest.c:16:5: note: 'cprojf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:17:5: warning: incompatible redeclaration of library function 'crealf' [-Wincompatible-library-redeclaration]\r\n int crealf (void);\r\n ^\r\n _configtest.c:17:5: note: 'crealf' is a builtin with type 'float (_Complex float)'\r\n _configtest.c:18:5: warning: incompatible redeclaration of library function 'csinf' [-Wincompatible-library-redeclaration]\r\n int csinf (void);\r\n ^\r\n _configtest.c:18:5: note: 'csinf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:19:5: warning: incompatible redeclaration of library function 'csinhf' [-Wincompatible-library-redeclaration]\r\n int csinhf (void);\r\n ^\r\n _configtest.c:19:5: note: 'csinhf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:20:5: warning: incompatible redeclaration of library function 'csqrtf' [-Wincompatible-library-redeclaration]\r\n int csqrtf (void);\r\n ^\r\n _configtest.c:20:5: note: 'csqrtf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:21:5: warning: incompatible redeclaration of library function 'ctanf' [-Wincompatible-library-redeclaration]\r\n int ctanf (void);\r\n ^\r\n _configtest.c:21:5: note: 'ctanf' is a builtin with type '_Complex float (_Complex float)'\r\n _configtest.c:22:5: warning: incompatible redeclaration of library function 'ctanhf' [-Wincompatible-library-redeclaration]\r\n int ctanhf (void);\r\n ^\r\n _configtest.c:22:5: note: 'ctanhf' is a builtin with type '_Complex float (_Complex float)'\r\n 22 warnings generated.\r\n clang _configtest.o -o _configtest\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d _configtest\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n _configtest.c:1:5: warning: incompatible redeclaration of library function 'cabsl' [-Wincompatible-library-redeclaration]\r\n int cabsl (void);\r\n ^\r\n _configtest.c:1:5: note: 'cabsl' is a builtin with type 'long double (_Complex long 
double)'\r\n _configtest.c:2:5: warning: incompatible redeclaration of library function 'cacosl' [-Wincompatible-library-redeclaration]\r\n int cacosl (void);\r\n ^\r\n _configtest.c:2:5: note: 'cacosl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:3:5: warning: incompatible redeclaration of library function 'cacoshl' [-Wincompatible-library-redeclaration]\r\n int cacoshl (void);\r\n ^\r\n _configtest.c:3:5: note: 'cacoshl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:4:5: warning: incompatible redeclaration of library function 'cargl' [-Wincompatible-library-redeclaration]\r\n int cargl (void);\r\n ^\r\n _configtest.c:4:5: note: 'cargl' is a builtin with type 'long double (_Complex long double)'\r\n _configtest.c:5:5: warning: incompatible redeclaration of library function 'casinl' [-Wincompatible-library-redeclaration]\r\n int casinl (void);\r\n ^\r\n _configtest.c:5:5: note: 'casinl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:6:5: warning: incompatible redeclaration of library function 'casinhl' [-Wincompatible-library-redeclaration]\r\n int casinhl (void);\r\n ^\r\n _configtest.c:6:5: note: 'casinhl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:7:5: warning: incompatible redeclaration of library function 'catanl' [-Wincompatible-library-redeclaration]\r\n int catanl (void);\r\n ^\r\n _configtest.c:7:5: note: 'catanl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:8:5: warning: incompatible redeclaration of library function 'catanhl' [-Wincompatible-library-redeclaration]\r\n int catanhl (void);\r\n ^\r\n _configtest.c:8:5: note: 'catanhl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:9:5: warning: incompatible redeclaration of library function 'ccosl' [-Wincompatible-library-redeclaration]\r\n int ccosl (void);\r\n ^\r\n _configtest.c:9:5: note: 'ccosl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:10:5: warning: incompatible redeclaration of library function 'ccoshl' [-Wincompatible-library-redeclaration]\r\n int ccoshl (void);\r\n ^\r\n _configtest.c:10:5: note: 'ccoshl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:11:5: warning: incompatible redeclaration of library function 'cexpl' [-Wincompatible-library-redeclaration]\r\n int cexpl (void);\r\n ^\r\n _configtest.c:11:5: note: 'cexpl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:12:5: warning: incompatible redeclaration of library function 'cimagl' [-Wincompatible-library-redeclaration]\r\n int cimagl (void);\r\n ^\r\n _configtest.c:12:5: note: 'cimagl' is a builtin with type 'long double (_Complex long double)'\r\n _configtest.c:13:5: warning: incompatible redeclaration of library function 'clogl' [-Wincompatible-library-redeclaration]\r\n int clogl (void);\r\n ^\r\n _configtest.c:13:5: note: 'clogl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:14:5: warning: incompatible redeclaration of library function 'conjl' [-Wincompatible-library-redeclaration]\r\n int conjl (void);\r\n ^\r\n _configtest.c:14:5: note: 'conjl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:15:5: warning: incompatible redeclaration of library function 'cpowl' [-Wincompatible-library-redeclaration]\r\n int cpowl 
(void);\r\n ^\r\n _configtest.c:15:5: note: 'cpowl' is a builtin with type '_Complex long double (_Complex long double, _Complex long double)'\r\n _configtest.c:16:5: warning: incompatible redeclaration of library function 'cprojl' [-Wincompatible-library-redeclaration]\r\n int cprojl (void);\r\n ^\r\n _configtest.c:16:5: note: 'cprojl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:17:5: warning: incompatible redeclaration of library function 'creall' [-Wincompatible-library-redeclaration]\r\n int creall (void);\r\n ^\r\n _configtest.c:17:5: note: 'creall' is a builtin with type 'long double (_Complex long double)'\r\n _configtest.c:18:5: warning: incompatible redeclaration of library function 'csinl' [-Wincompatible-library-redeclaration]\r\n int csinl (void);\r\n ^\r\n _configtest.c:18:5: note: 'csinl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:19:5: warning: incompatible redeclaration of library function 'csinhl' [-Wincompatible-library-redeclaration]\r\n int csinhl (void);\r\n ^\r\n _configtest.c:19:5: note: 'csinhl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:20:5: warning: incompatible redeclaration of library function 'csqrtl' [-Wincompatible-library-redeclaration]\r\n int csqrtl (void);\r\n ^\r\n _configtest.c:20:5: note: 'csqrtl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:21:5: warning: incompatible redeclaration of library function 'ctanl' [-Wincompatible-library-redeclaration]\r\n int ctanl (void);\r\n ^\r\n _configtest.c:21:5: note: 'ctanl' is a builtin with type '_Complex long double (_Complex long double)'\r\n _configtest.c:22:5: warning: incompatible redeclaration of library function 'ctanhl' [-Wincompatible-library-redeclaration]\r\n int ctanhl (void);\r\n ^\r\n _configtest.c:22:5: note: 'ctanhl' is a builtin with type '_Complex long double (_Complex long double)'\r\n 22 warnings generated.\r\n clang _configtest.o -o _configtest\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d _configtest\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n _configtest.c:2:12: warning: unused function 'static_func' [-Wunused-function]\r\n static int static_func (char * restrict a)\r\n ^\r\n 1 warning generated.\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include 
-I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n _configtest.c:3:19: warning: unused function 'static_func' [-Wunused-function]\r\n static inline int static_func (void)\r\n ^\r\n 1 warning generated.\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n File: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/config.h\r\n #define SIZEOF_PY_INTPTR_T 8\r\n #define SIZEOF_OFF_T 8\r\n #define SIZEOF_PY_LONG_LONG 8\r\n #define MATHLIB\r\n #define HAVE_SIN 1\r\n #define HAVE_COS 1\r\n #define HAVE_TAN 1\r\n #define HAVE_SINH 1\r\n #define HAVE_COSH 1\r\n #define HAVE_TANH 1\r\n #define HAVE_FABS 1\r\n #define HAVE_FLOOR 1\r\n #define HAVE_CEIL 1\r\n #define HAVE_SQRT 1\r\n #define HAVE_LOG10 1\r\n #define HAVE_LOG 1\r\n #define HAVE_EXP 1\r\n #define HAVE_ASIN 1\r\n #define HAVE_ACOS 1\r\n #define HAVE_ATAN 1\r\n #define HAVE_FMOD 1\r\n #define HAVE_MODF 1\r\n #define HAVE_FREXP 1\r\n #define HAVE_LDEXP 1\r\n #define HAVE_RINT 1\r\n #define HAVE_TRUNC 1\r\n #define HAVE_EXP2 1\r\n #define HAVE_LOG2 1\r\n #define HAVE_ATAN2 1\r\n #define HAVE_POW 1\r\n #define HAVE_NEXTAFTER 1\r\n #define HAVE_STRTOLL 1\r\n #define HAVE_STRTOULL 1\r\n #define HAVE_CBRT 1\r\n #define HAVE_STRTOLD_L 1\r\n #define HAVE_BACKTRACE 1\r\n #define HAVE_MADVISE 1\r\n #define HAVE_XMMINTRIN_H 1\r\n #define HAVE_EMMINTRIN_H 1\r\n #define HAVE_XLOCALE_H 1\r\n #define HAVE_DLFCN_H 1\r\n #define HAVE_SYS_MMAN_H 1\r\n #define HAVE___BUILTIN_ISNAN 1\r\n #define HAVE___BUILTIN_ISINF 1\r\n #define HAVE___BUILTIN_ISFINITE 1\r\n #define HAVE___BUILTIN_BSWAP32 1\r\n #define HAVE___BUILTIN_BSWAP64 1\r\n #define HAVE___BUILTIN_EXPECT 1\r\n #define HAVE___BUILTIN_MUL_OVERFLOW 1\r\n #define HAVE___BUILTIN_CPU_SUPPORTS 1\r\n #define HAVE__M_FROM_INT64 1\r\n #define HAVE__MM_LOAD_PS 1\r\n #define HAVE__MM_PREFETCH 1\r\n #define HAVE__MM_LOAD_PD 1\r\n #define HAVE___BUILTIN_PREFETCH 1\r\n #define HAVE_LINK_AVX 1\r\n #define HAVE_LINK_AVX2 1\r\n #define HAVE_XGETBV 1\r\n #define HAVE_ATTRIBUTE_NONNULL 1\r\n #define 
HAVE_ATTRIBUTE_TARGET_AVX 1\r\n #define HAVE_ATTRIBUTE_TARGET_AVX2 1\r\n #define HAVE___THREAD 1\r\n #define HAVE_SINF 1\r\n #define HAVE_COSF 1\r\n #define HAVE_TANF 1\r\n #define HAVE_SINHF 1\r\n #define HAVE_COSHF 1\r\n #define HAVE_TANHF 1\r\n #define HAVE_FABSF 1\r\n #define HAVE_FLOORF 1\r\n #define HAVE_CEILF 1\r\n #define HAVE_RINTF 1\r\n #define HAVE_TRUNCF 1\r\n #define HAVE_SQRTF 1\r\n #define HAVE_LOG10F 1\r\n #define HAVE_LOGF 1\r\n #define HAVE_LOG1PF 1\r\n #define HAVE_EXPF 1\r\n #define HAVE_EXPM1F 1\r\n #define HAVE_ASINF 1\r\n #define HAVE_ACOSF 1\r\n #define HAVE_ATANF 1\r\n #define HAVE_ASINHF 1\r\n #define HAVE_ACOSHF 1\r\n #define HAVE_ATANHF 1\r\n #define HAVE_HYPOTF 1\r\n #define HAVE_ATAN2F 1\r\n #define HAVE_POWF 1\r\n #define HAVE_FMODF 1\r\n #define HAVE_MODFF 1\r\n #define HAVE_FREXPF 1\r\n #define HAVE_LDEXPF 1\r\n #define HAVE_EXP2F 1\r\n #define HAVE_LOG2F 1\r\n #define HAVE_COPYSIGNF 1\r\n #define HAVE_NEXTAFTERF 1\r\n #define HAVE_CBRTF 1\r\n #define HAVE_SINL 1\r\n #define HAVE_COSL 1\r\n #define HAVE_TANL 1\r\n #define HAVE_SINHL 1\r\n #define HAVE_COSHL 1\r\n #define HAVE_TANHL 1\r\n #define HAVE_FABSL 1\r\n #define HAVE_FLOORL 1\r\n #define HAVE_CEILL 1\r\n #define HAVE_RINTL 1\r\n #define HAVE_TRUNCL 1\r\n #define HAVE_SQRTL 1\r\n #define HAVE_LOG10L 1\r\n #define HAVE_LOGL 1\r\n #define HAVE_LOG1PL 1\r\n #define HAVE_EXPL 1\r\n #define HAVE_EXPM1L 1\r\n #define HAVE_ASINL 1\r\n #define HAVE_ACOSL 1\r\n #define HAVE_ATANL 1\r\n #define HAVE_ASINHL 1\r\n #define HAVE_ACOSHL 1\r\n #define HAVE_ATANHL 1\r\n #define HAVE_HYPOTL 1\r\n #define HAVE_ATAN2L 1\r\n #define HAVE_POWL 1\r\n #define HAVE_FMODL 1\r\n #define HAVE_MODFL 1\r\n #define HAVE_FREXPL 1\r\n #define HAVE_LDEXPL 1\r\n #define HAVE_EXP2L 1\r\n #define HAVE_LOG2L 1\r\n #define HAVE_COPYSIGNL 1\r\n #define HAVE_NEXTAFTERL 1\r\n #define HAVE_CBRTL 1\r\n #define HAVE_DECL_SIGNBIT\r\n #define HAVE_COMPLEX_H 1\r\n #define HAVE_CABS 1\r\n #define HAVE_CACOS 1\r\n #define HAVE_CACOSH 1\r\n #define HAVE_CARG 1\r\n #define HAVE_CASIN 1\r\n #define HAVE_CASINH 1\r\n #define HAVE_CATAN 1\r\n #define HAVE_CATANH 1\r\n #define HAVE_CCOS 1\r\n #define HAVE_CCOSH 1\r\n #define HAVE_CEXP 1\r\n #define HAVE_CIMAG 1\r\n #define HAVE_CLOG 1\r\n #define HAVE_CONJ 1\r\n #define HAVE_CPOW 1\r\n #define HAVE_CPROJ 1\r\n #define HAVE_CREAL 1\r\n #define HAVE_CSIN 1\r\n #define HAVE_CSINH 1\r\n #define HAVE_CSQRT 1\r\n #define HAVE_CTAN 1\r\n #define HAVE_CTANH 1\r\n #define HAVE_CABSF 1\r\n #define HAVE_CACOSF 1\r\n #define HAVE_CACOSHF 1\r\n #define HAVE_CARGF 1\r\n #define HAVE_CASINF 1\r\n #define HAVE_CASINHF 1\r\n #define HAVE_CATANF 1\r\n #define HAVE_CATANHF 1\r\n #define HAVE_CCOSF 1\r\n #define HAVE_CCOSHF 1\r\n #define HAVE_CEXPF 1\r\n #define HAVE_CIMAGF 1\r\n #define HAVE_CLOGF 1\r\n #define HAVE_CONJF 1\r\n #define HAVE_CPOWF 1\r\n #define HAVE_CPROJF 1\r\n #define HAVE_CREALF 1\r\n #define HAVE_CSINF 1\r\n #define HAVE_CSINHF 1\r\n #define HAVE_CSQRTF 1\r\n #define HAVE_CTANF 1\r\n #define HAVE_CTANHF 1\r\n #define HAVE_CABSL 1\r\n #define HAVE_CACOSL 1\r\n #define HAVE_CACOSHL 1\r\n #define HAVE_CARGL 1\r\n #define HAVE_CASINL 1\r\n #define HAVE_CASINHL 1\r\n #define HAVE_CATANL 1\r\n #define HAVE_CATANHL 1\r\n #define HAVE_CCOSL 1\r\n #define HAVE_CCOSHL 1\r\n #define HAVE_CEXPL 1\r\n #define HAVE_CIMAGL 1\r\n #define HAVE_CLOGL 1\r\n #define HAVE_CONJL 1\r\n #define HAVE_CPOWL 1\r\n #define HAVE_CPROJL 1\r\n #define HAVE_CREALL 1\r\n #define HAVE_CSINL 1\r\n #define HAVE_CSINHL 1\r\n #define 
HAVE_CSQRTL 1\r\n #define HAVE_CTANL 1\r\n #define HAVE_CTANHL 1\r\n #define NPY_RESTRICT restrict\r\n #define NPY_RELAXED_STRIDES_CHECKING 1\r\n #define HAVE_LDOUBLE_INTEL_EXTENDED_16_BYTES_LE 1\r\n #define NPY_PY3K 1\r\n #ifndef __cplusplus\r\n \/* #undef inline *\/\r\n #endif\r\n \r\n #ifndef _NPY_NPY_CONFIG_H_\r\n #error config.h should never be included directly, include npy_config.h instead\r\n #endif\r\n \r\n EOF\r\n adding 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/config.h' to sources.\r\n Generating build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/_numpyconfig.h\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n _configtest.c:1:5: warning: incompatible redeclaration of library function 'exp' [-Wincompatible-library-redeclaration]\r\n int exp (void);\r\n ^\r\n _configtest.c:1:5: note: 'exp' is a builtin with type 'double (double)'\r\n 1 warning generated.\r\n clang _configtest.o -o _configtest\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d _configtest\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include 
-I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -c'\r\n clang: _configtest.c\r\n success!\r\n removing: _configtest.c _configtest.o _configtest.o.d\r\n File: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/_numpyconfig.h\r\n #define NPY_SIZEOF_SHORT SIZEOF_SHORT\r\n #define NPY_SIZEOF_INT SIZEOF_INT\r\n #define NPY_SIZEOF_LONG SIZEOF_LONG\r\n #define NPY_SIZEOF_FLOAT 4\r\n #define NPY_SIZEOF_COMPLEX_FLOAT 8\r\n #define NPY_SIZEOF_DOUBLE 8\r\n #define NPY_SIZEOF_COMPLEX_DOUBLE 16\r\n #define NPY_SIZEOF_LONGDOUBLE 16\r\n #define NPY_SIZEOF_COMPLEX_LONGDOUBLE 32\r\n #define NPY_SIZEOF_PY_INTPTR_T 8\r\n #define NPY_SIZEOF_OFF_T 8\r\n #define NPY_SIZEOF_PY_LONG_LONG 8\r\n #define NPY_SIZEOF_LONGLONG 8\r\n #define NPY_NO_SMP 0\r\n #define NPY_HAVE_DECL_ISNAN\r\n #define NPY_HAVE_DECL_ISINF\r\n #define NPY_HAVE_DECL_ISFINITE\r\n #define NPY_HAVE_DECL_SIGNBIT\r\n #define NPY_USE_C99_COMPLEX 1\r\n #define NPY_HAVE_COMPLEX_DOUBLE 1\r\n #define NPY_HAVE_COMPLEX_FLOAT 1\r\n #define NPY_HAVE_COMPLEX_LONG_DOUBLE 1\r\n #define NPY_RELAXED_STRIDES_CHECKING 1\r\n #define NPY_USE_C99_FORMATS 1\r\n #define NPY_VISIBILITY_HIDDEN __attribute__((visibility(\"hidden\")))\r\n #define NPY_ABI_VERSION 0x01000009\r\n #define NPY_API_VERSION 0x0000000D\r\n \r\n #ifndef __STDC_FORMAT_MACROS\r\n #define __STDC_FORMAT_MACROS 1\r\n #endif\r\n \r\n EOF\r\n adding 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/_numpyconfig.h' to sources.\r\n executing numpy\/core\/code_generators\/generate_numpy_api.py\r\n adding 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/__multiarray_api.h' to sources.\r\n numpy.core - nothing done with h_files = ['build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/config.h', 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/_numpyconfig.h', 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/__multiarray_api.h']\r\n building extension \"numpy.core._multiarray_tests\" sources\r\n creating build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/_multiarray_tests.c\r\n building extension \"numpy.core._multiarray_umath\" sources\r\n adding 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/config.h' to sources.\r\n adding 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/_numpyconfig.h' to sources.\r\n executing numpy\/core\/code_generators\/generate_numpy_api.py\r\n adding 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/__multiarray_api.h' to sources.\r\n executing numpy\/core\/code_generators\/generate_ufunc_api.py\r\n adding 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/__ufunc_api.h' to sources.\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/arraytypes.c\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/einsum.c\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/lowlevel_strided_loops.c\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/nditer_templ.c\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/scalartypes.c\r\n creating build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\r\n conv_template:> 
build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/funcs.inc\r\n adding 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath' to include_dirs.\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/simd.inc\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/loops.h\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/loops.c\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/matmul.h\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/matmul.c\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/scalarmath.c\r\n adding 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath' to include_dirs.\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\/templ_common.h\r\n adding 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common' to include_dirs.\r\n numpy.core - nothing done with h_files = ['build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/funcs.inc', 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/simd.inc', 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/loops.h', 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/matmul.h', 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath\/npy_math_internal.h', 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\/templ_common.h', 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/config.h', 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/_numpyconfig.h', 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/__multiarray_api.h', 'build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/__ufunc_api.h']\r\n building extension \"numpy.core._umath_tests\" sources\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_umath_tests.c\r\n building extension \"numpy.core._rational_tests\" sources\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_rational_tests.c\r\n building extension \"numpy.core._struct_ufunc_tests\" sources\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_struct_ufunc_tests.c\r\n building extension \"numpy.core._operand_flag_tests\" sources\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_operand_flag_tests.c\r\n building extension \"numpy.fft.fftpack_lite\" sources\r\n building extension \"numpy.linalg.lapack_lite\" sources\r\n creating build\/src.macosx-10.15-x86_64-3.9\/numpy\/linalg\r\n adding 'numpy\/linalg\/lapack_lite\/python_xerbla.c' to sources.\r\n building extension \"numpy.linalg._umath_linalg\" sources\r\n adding 'numpy\/linalg\/lapack_lite\/python_xerbla.c' to sources.\r\n conv_template:> build\/src.macosx-10.15-x86_64-3.9\/numpy\/linalg\/umath_linalg.c\r\n building extension \"numpy.random.mtrand\" sources\r\n creating build\/src.macosx-10.15-x86_64-3.9\/numpy\/random\r\n building data_files sources\r\n build_src: building npy-pkg config files\r\n running build_py\r\n creating build\/lib.macosx-10.15-x86_64-3.9\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n copying numpy\/conftest.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n copying numpy\/version.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n copying numpy\/_globals.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n copying numpy\/__init__.py -> 
build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n copying numpy\/dual.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n copying numpy\/_distributor_init.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n copying numpy\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n copying numpy\/ctypeslib.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n copying numpy\/matlib.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n copying numpy\/_pytesttester.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n copying build\/src.macosx-10.15-x86_64-3.9\/numpy\/__config__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/compat\r\n copying numpy\/compat\/py3k.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/compat\r\n copying numpy\/compat\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/compat\r\n copying numpy\/compat\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/compat\r\n copying numpy\/compat\/_inspect.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/compat\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/umath.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/fromnumeric.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/_dtype.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/_add_newdocs.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/_methods.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/_internal.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/_string_helpers.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/multiarray.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/records.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/setup_common.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/_aliased_types.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/memmap.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/overrides.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/getlimits.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/_dtype_ctypes.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/defchararray.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/shape_base.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/machar.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/numeric.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/function_base.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/einsumfunc.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/umath_tests.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/info.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/numerictypes.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/_type_aliases.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying 
numpy\/core\/cversions.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/arrayprint.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n copying numpy\/core\/code_generators\/generate_numpy_api.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/unixccompiler.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/numpy_distribution.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/conv_template.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/cpuinfo.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/ccompiler.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/msvc9compiler.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/npy_pkg_config.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/compat.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/misc_util.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/log.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/line_endings.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/lib2def.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/pathccompiler.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/system_info.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/core.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/__version__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/exec_command.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/from_template.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/mingw32ccompiler.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/extension.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/msvccompiler.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/intelccompiler.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying numpy\/distutils\/info.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n copying build\/src.macosx-10.15-x86_64-3.9\/numpy\/distutils\/__config__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/build.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/config_compiler.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/build_ext.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/config.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying 
numpy\/distutils\/command\/install_headers.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/build_py.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/build_src.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/sdist.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/build_scripts.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/bdist_rpm.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/install_clib.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/build_clib.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/autodist.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/egg_info.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/install.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/develop.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n copying numpy\/distutils\/command\/install_data.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/command\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/gnu.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/compaq.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/intel.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/none.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/nag.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/pg.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/ibm.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/sun.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/lahey.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/g95.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/mips.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/hpux.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/environment.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/pathf95.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/absoft.py -> 
build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n copying numpy\/distutils\/fcompiler\/vast.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/distutils\/fcompiler\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/misc.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/internals.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/creation.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/constants.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/ufuncs.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/broadcasting.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/basics.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/subclassing.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/indexing.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/byteswapping.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/structured_arrays.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n copying numpy\/doc\/glossary.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/doc\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/cfuncs.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/common_rules.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/crackfortran.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/cb_rules.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/rules.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/f2py2e.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/func2subr.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/__version__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/diagnose.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/capi_maps.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/f90mod_rules.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/f2py_testing.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/use_rules.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/info.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/auxfuncs.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n copying numpy\/f2py\/__main__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/f2py\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/fft\r\n copying numpy\/fft\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/fft\r\n copying numpy\/fft\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/fft\r\n copying numpy\/fft\/helper.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/fft\r\n copying numpy\/fft\/fftpack.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/fft\r\n copying numpy\/fft\/info.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/fft\r\n creating 
build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/_iotools.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/mixins.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/nanfunctions.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/recfunctions.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/histograms.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/scimath.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/_version.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/user_array.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/format.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/twodim_base.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/financial.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/index_tricks.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/npyio.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/shape_base.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/stride_tricks.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/utils.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/arrayterator.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/function_base.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/arraysetops.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/arraypad.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/type_check.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/info.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/polynomial.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/_datasource.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n copying numpy\/lib\/ufunclike.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/lib\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/linalg\r\n copying numpy\/linalg\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/linalg\r\n copying numpy\/linalg\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/linalg\r\n copying numpy\/linalg\/linalg.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/linalg\r\n copying numpy\/linalg\/info.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/linalg\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/ma\r\n copying numpy\/ma\/extras.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/ma\r\n copying numpy\/ma\/version.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/ma\r\n copying numpy\/ma\/testutils.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/ma\r\n copying numpy\/ma\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/ma\r\n copying numpy\/ma\/core.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/ma\r\n copying numpy\/ma\/bench.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/ma\r\n copying numpy\/ma\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/ma\r\n copying numpy\/ma\/timer_comparison.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/ma\r\n 
copying numpy\/ma\/mrecords.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/ma\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/matrixlib\r\n copying numpy\/matrixlib\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/matrixlib\r\n copying numpy\/matrixlib\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/matrixlib\r\n copying numpy\/matrixlib\/defmatrix.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/matrixlib\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/polynomial\r\n copying numpy\/polynomial\/laguerre.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/polynomial\r\n copying numpy\/polynomial\/_polybase.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/polynomial\r\n copying numpy\/polynomial\/polyutils.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/polynomial\r\n copying numpy\/polynomial\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/polynomial\r\n copying numpy\/polynomial\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/polynomial\r\n copying numpy\/polynomial\/hermite_e.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/polynomial\r\n copying numpy\/polynomial\/chebyshev.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/polynomial\r\n copying numpy\/polynomial\/polynomial.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/polynomial\r\n copying numpy\/polynomial\/legendre.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/polynomial\r\n copying numpy\/polynomial\/hermite.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/polynomial\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/random\r\n copying numpy\/random\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/random\r\n copying numpy\/random\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/random\r\n copying numpy\/random\/info.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/random\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\r\n copying numpy\/testing\/nosetester.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\r\n copying numpy\/testing\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\r\n copying numpy\/testing\/noseclasses.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\r\n copying numpy\/testing\/setup.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\r\n copying numpy\/testing\/utils.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\r\n copying numpy\/testing\/print_coercion_tables.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\r\n copying numpy\/testing\/decorators.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\r\n creating build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\/_private\r\n copying numpy\/testing\/_private\/nosetester.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\/_private\r\n copying numpy\/testing\/_private\/__init__.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\/_private\r\n copying numpy\/testing\/_private\/noseclasses.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\/_private\r\n copying numpy\/testing\/_private\/utils.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\/_private\r\n copying numpy\/testing\/_private\/parameterized.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\/_private\r\n copying numpy\/testing\/_private\/decorators.py -> build\/lib.macosx-10.15-x86_64-3.9\/numpy\/testing\/_private\r\n running build_clib\r\n customize UnixCCompiler\r\n customize UnixCCompiler using build_clib\r\n building 'npymath' library\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare 
-Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n creating build\/temp.macosx-10.15-x86_64-3.9\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/build\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath\r\n compile options: '-Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n clang: numpy\/core\/src\/npymath\/npy_math.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath\/npy_math_complex.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath\/ieee754.c\r\n clang: numpy\/core\/src\/npymath\/halffloat.c\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:48:33: warning: unused variable 'tiny' [-Wunused-const-variable]\r\n static const volatile npy_float tiny = 3.9443045e-31f;\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:67:25: warning: unused variable 'c_halff' [-Wunused-const-variable]\r\n static const npy_cfloat c_halff = {0.5F, 0.0};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:68:25: warning: unused variable 'c_if' [-Wunused-const-variable]\r\n static const npy_cfloat c_if = {0.0, 1.0F};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:69:25: warning: unused variable 'c_ihalff' [-Wunused-const-variable]\r\n static const npy_cfloat c_ihalff = {0.0, 0.5F};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:79:1: warning: unused function 'caddf' [-Wunused-function]\r\n caddf(npy_cfloat a, npy_cfloat b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:87:1: warning: unused function 'csubf' [-Wunused-function]\r\n csubf(npy_cfloat a, npy_cfloat b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:137:1: warning: unused function 'cnegf' [-Wunused-function]\r\n cnegf(npy_cfloat a)\r\n ^\r\n 
numpy\/core\/src\/npymath\/npy_math_complex.c.src:144:1: warning: unused function 'cmulif' [-Wunused-function]\r\n cmulif(npy_cfloat a)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:67:26: warning: unused variable 'c_half' [-Wunused-const-variable]\r\n static const npy_cdouble c_half = {0.5, 0.0};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:68:26: warning: unused variable 'c_i' [-Wunused-const-variable]\r\n static const npy_cdouble c_i = {0.0, 1.0};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:69:26: warning: unused variable 'c_ihalf' [-Wunused-const-variable]\r\n static const npy_cdouble c_ihalf = {0.0, 0.5};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:79:1: warning: unused function 'cadd' [-Wunused-function]\r\n cadd(npy_cdouble a, npy_cdouble b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:87:1: warning: unused function 'csub' [-Wunused-function]\r\n csub(npy_cdouble a, npy_cdouble b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:137:1: warning: unused function 'cneg' [-Wunused-function]\r\n cneg(npy_cdouble a)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:144:1: warning: unused function 'cmuli' [-Wunused-function]\r\n cmuli(npy_cdouble a)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:67:30: warning: unused variable 'c_halfl' [-Wunused-const-variable]\r\n static const npy_clongdouble c_halfl = {0.5L, 0.0};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:68:30: warning: unused variable 'c_il' [-Wunused-const-variable]\r\n static const npy_clongdouble c_il = {0.0, 1.0L};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:69:30: warning: unused variable 'c_ihalfl' [-Wunused-const-variable]\r\n static const npy_clongdouble c_ihalfl = {0.0, 0.5L};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:79:1: warning: unused function 'caddl' [-Wunused-function]\r\n caddl(npy_clongdouble a, npy_clongdouble b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:87:1: warning: unused function 'csubl' [-Wunused-function]\r\n csubl(npy_clongdouble a, npy_clongdouble b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:137:1: warning: unused function 'cnegl' [-Wunused-function]\r\n cnegl(npy_clongdouble a)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:144:1: warning: unused function 'cmulil' [-Wunused-function]\r\n cmulil(npy_clongdouble a)\r\n ^\r\n 22 warnings generated.\r\n ar: adding 4 object files to build\/temp.macosx-10.15-x86_64-3.9\/libnpymath.a\r\n ranlib:@ build\/temp.macosx-10.15-x86_64-3.9\/libnpymath.a\r\n building 'npysort' library\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n creating build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npysort\r\n compile options: '-Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort 
-I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npysort\/quicksort.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npysort\/mergesort.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npysort\/heapsort.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npysort\/selection.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npysort\/binsearch.c\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to 
mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: 
code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/npysort\/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp k;\r\n ^~~~~~~~~~~\r\n numpy\/core\/src\/npysort\/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead\r\n else if (0 && kth == num - 1) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n 22 warnings generated.\r\n ar: adding 5 object files to build\/temp.macosx-10.15-x86_64-3.9\/libnpysort.a\r\n ranlib:@ build\/temp.macosx-10.15-x86_64-3.9\/libnpysort.a\r\n running build_ext\r\n customize UnixCCompiler\r\n customize UnixCCompiler using build_ext\r\n building 'numpy.core._dummy' extension\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n clang: numpy\/core\/src\/dummymodule.c\r\n clang -bundle -undefined dynamic_lookup -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/dummymodule.o -L\/usr\/local\/lib -L\/usr\/local\/opt\/openssl@1.1\/lib -L\/usr\/local\/opt\/sqlite\/lib -Lbuild\/temp.macosx-10.15-x86_64-3.9 -o build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\/_dummy.cpython-39-darwin.so\r\n building 'numpy.core._multiarray_tests' extension\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common 
-dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n creating build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\r\n compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/_multiarray_tests.c\r\n clang: numpy\/core\/src\/common\/mem_overlap.c\r\n clang -bundle -undefined dynamic_lookup -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/_multiarray_tests.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\/mem_overlap.o -L\/usr\/local\/lib -L\/usr\/local\/opt\/openssl@1.1\/lib -L\/usr\/local\/opt\/sqlite\/lib -Lbuild\/temp.macosx-10.15-x86_64-3.9 -lnpymath -o build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\/_multiarray_tests.cpython-39-darwin.so\r\n building 'numpy.core._multiarray_umath' extension\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/private\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/private\/var\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/private\/var\/folders\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/private\/var\/folders\/fz\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/private\/var\/folders\/fz\/0j719tys48x7jlnjnwc69smr0000gn\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/private\/var\/folders\/fz\/0j719tys48x7jlnjnwc69smr0000gn\/T\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/private\/var\/folders\/fz\/0j719tys48x7jlnjnwc69smr0000gn\/T\/pip-install-ufzck51l\r\n creating 
build\/temp.macosx-10.15-x86_64-3.9\/private\/var\/folders\/fz\/0j719tys48x7jlnjnwc69smr0000gn\/T\/pip-install-ufzck51l\/numpy_b0e8a3953a1d4b46801f12bcea55536e\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/private\/var\/folders\/fz\/0j719tys48x7jlnjnwc69smr0000gn\/T\/pip-install-ufzck51l\/numpy_b0e8a3953a1d4b46801f12bcea55536e\/numpy\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/private\/var\/folders\/fz\/0j719tys48x7jlnjnwc69smr0000gn\/T\/pip-install-ufzck51l\/numpy_b0e8a3953a1d4b46801f12bcea55536e\/numpy\/_build_utils\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/private\/var\/folders\/fz\/0j719tys48x7jlnjnwc69smr0000gn\/T\/pip-install-ufzck51l\/numpy_b0e8a3953a1d4b46801f12bcea55536e\/numpy\/_build_utils\/src\r\n compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -DNO_ATLAS_INFO=3 -DHAVE_CBLAS -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n extra options: '-msse3 -I\/System\/Library\/Frameworks\/vecLib.framework\/Headers'\r\n clang: numpy\/core\/src\/multiarray\/alloc.c\r\n clang: numpy\/core\/src\/multiarray\/calculation.cclang: numpy\/core\/src\/multiarray\/array_assign_scalar.c\r\n clang: numpy\/core\/src\/multiarray\/convert.c\r\n \r\n clang: numpy\/core\/src\/multiarray\/ctors.c\r\n clang: numpy\/core\/src\/multiarray\/datetime_busday.c\r\n clang: numpy\/core\/src\/multiarray\/dragon4.cclang: numpy\/core\/src\/multiarray\/flagsobject.c\r\n \r\n numpy\/core\/src\/multiarray\/ctors.c:2261:36: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n if (!(PyUString_Check(name) && PyUString_GET_SIZE(name) == 0)) {\r\n ^\r\n numpy\/core\/include\/numpy\/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'\r\n #define PyUString_GET_SIZE PyUnicode_GET_SIZE\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n 
\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/ctors.c:2261:36: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n if (!(PyUString_Check(name) && PyUString_GET_SIZE(name) == 0)) {\r\n ^\r\n numpy\/core\/include\/numpy\/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'\r\n #define PyUString_GET_SIZE PyUnicode_GET_SIZE\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/ctors.c:2261:36: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n if (!(PyUString_Check(name) && PyUString_GET_SIZE(name) == 0)) {\r\n ^\r\n numpy\/core\/include\/numpy\/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'\r\n #define PyUString_GET_SIZE PyUnicode_GET_SIZE\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n clang: numpy\/core\/src\/multiarray\/arrayobject.c\r\n clang: numpy\/core\/src\/multiarray\/array_assign_array.c\r\n clang: numpy\/core\/src\/multiarray\/convert_datatype.c\r\n clang: numpy\/core\/src\/multiarray\/getset.c\r\n clang: numpy\/core\/src\/multiarray\/datetime_busdaycal.c\r\n clang: numpy\/core\/src\/multiarray\/buffer.c\r\n clang: numpy\/core\/src\/multiarray\/compiled_base.c\r\n clang: numpy\/core\/src\/multiarray\/hashdescr.c\r\n clang: numpy\/core\/src\/multiarray\/descriptor.c\r\n numpy\/core\/src\/multiarray\/descriptor.c:453:13: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n if (PyUString_GET_SIZE(name) == 0) {\r\n ^\r\n numpy\/core\/include\/numpy\/npy_3kcompat.h:110:28: note: expanded from macro 
'PyUString_GET_SIZE'\r\n #define PyUString_GET_SIZE PyUnicode_GET_SIZE\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/descriptor.c:453:13: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n if (PyUString_GET_SIZE(name) == 0) {\r\n ^\r\n numpy\/core\/include\/numpy\/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'\r\n #define PyUString_GET_SIZE PyUnicode_GET_SIZE\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/descriptor.c:453:13: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n if (PyUString_GET_SIZE(name) == 0) {\r\n ^\r\n numpy\/core\/include\/numpy\/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'\r\n #define PyUString_GET_SIZE PyUnicode_GET_SIZE\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) 
__attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/descriptor.c:460:48: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n else if (PyUString_Check(title) && PyUString_GET_SIZE(title) > 0) {\r\n ^\r\n numpy\/core\/include\/numpy\/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'\r\n #define PyUString_GET_SIZE PyUnicode_GET_SIZE\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/descriptor.c:460:48: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n else if (PyUString_Check(title) && PyUString_GET_SIZE(title) > 0) {\r\n ^\r\n numpy\/core\/include\/numpy\/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'\r\n #define PyUString_GET_SIZE PyUnicode_GET_SIZE\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/descriptor.c:460:48: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n else if (PyUString_Check(title) && PyUString_GET_SIZE(title) > 0) {\r\n ^\r\n numpy\/core\/include\/numpy\/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'\r\n #define PyUString_GET_SIZE PyUnicode_GET_SIZE\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n 
\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n clang: numpy\/core\/src\/multiarray\/conversion_utils.c\r\n clang: numpy\/core\/src\/multiarray\/item_selection.c\r\n clang: numpy\/core\/src\/multiarray\/dtype_transfer.c\r\n clang: numpy\/core\/src\/multiarray\/mapping.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/arraytypes.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/nditer_templ.c\r\n 3 warnings generated.\r\n clang: numpy\/core\/src\/multiarray\/datetime.c\r\n numpy\/core\/src\/multiarray\/arraytypes.c.src:477:11: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n ptr = PyUnicode_AS_UNICODE(temp);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:279:7: note: expanded from macro 'PyUnicode_AS_UNICODE'\r\n PyUnicode_AsUnicode(_PyObject_CAST(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/arraytypes.c.src:482:15: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n datalen = PyUnicode_GET_DATA_SIZE(temp);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/arraytypes.c.src:482:15: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n datalen = 
PyUnicode_GET_DATA_SIZE(temp);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/arraytypes.c.src:482:15: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n datalen = PyUnicode_GET_DATA_SIZE(temp);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n clang: numpy\/core\/src\/multiarray\/common.c\r\n numpy\/core\/src\/multiarray\/common.c:187:28: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n itemsize = PyUnicode_GET_DATA_SIZE(temp);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n 
\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/common.c:187:28: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n itemsize = PyUnicode_GET_DATA_SIZE(temp);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/common.c:187:28: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n itemsize = PyUnicode_GET_DATA_SIZE(temp);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/common.c:239:28: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n itemsize = PyUnicode_GET_DATA_SIZE(temp);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n 
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/common.c:239:28: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n itemsize = PyUnicode_GET_DATA_SIZE(temp);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/common.c:239:28: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n itemsize = PyUnicode_GET_DATA_SIZE(temp);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n 
\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/common.c:282:24: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n int itemsize = PyUnicode_GET_DATA_SIZE(obj);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/common.c:282:24: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n int itemsize = PyUnicode_GET_DATA_SIZE(obj);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/common.c:282:24: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n int itemsize = PyUnicode_GET_DATA_SIZE(obj);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n 
PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n 6 warnings generated.\r\n clang: numpy\/core\/src\/multiarray\/nditer_pywrap.c\r\n 9 warnings generated.\r\n clang: numpy\/core\/src\/multiarray\/sequence.c\r\n clang: numpy\/core\/src\/multiarray\/shape.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/einsum.c\r\n clang: numpy\/core\/src\/multiarray\/methods.c\r\n clang: numpy\/core\/src\/multiarray\/iterators.c\r\n clang: numpy\/core\/src\/multiarray\/datetime_strings.c\r\n clang: numpy\/core\/src\/multiarray\/number.c\r\n clang: numpy\/core\/src\/multiarray\/scalarapi.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/scalartypes.c\r\n numpy\/core\/src\/multiarray\/scalarapi.c:74:28: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n return (void *)PyUnicode_AS_DATA(scalar);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:283:21: note: expanded from macro 'PyUnicode_AS_DATA'\r\n ((const char *)(PyUnicode_AS_UNICODE(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:279:7: note: expanded from macro 'PyUnicode_AS_UNICODE'\r\n PyUnicode_AsUnicode(_PyObject_CAST(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalarapi.c:135:28: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n return (void *)PyUnicode_AS_DATA(scalar);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:283:21: note: expanded from macro 'PyUnicode_AS_DATA'\r\n ((const char *)(PyUnicode_AS_UNICODE(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:279:7: note: expanded from macro 'PyUnicode_AS_UNICODE'\r\n PyUnicode_AsUnicode(_PyObject_CAST(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated 
here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalarapi.c:568:29: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n descr->elsize = PyUnicode_GET_DATA_SIZE(sc);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalarapi.c:568:29: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n descr->elsize = PyUnicode_GET_DATA_SIZE(sc);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalarapi.c:568:29: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n descr->elsize = PyUnicode_GET_DATA_SIZE(sc);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n 
\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:475:17: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n ip = dptr = PyUnicode_AS_UNICODE(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:279:7: note: expanded from macro 'PyUnicode_AS_UNICODE'\r\n PyUnicode_AsUnicode(_PyObject_CAST(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:476:11: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n len = PyUnicode_GET_SIZE(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:476:11: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n len = PyUnicode_GET_SIZE(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n 
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:476:11: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n len = PyUnicode_GET_SIZE(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:481:11: warning: 'PyUnicode_FromUnicode' is deprecated [-Wdeprecated-declarations]\r\n new = PyUnicode_FromUnicode(ip, len);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:551:1: note: 'PyUnicode_FromUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(PyObject*) PyUnicode_FromUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:475:17: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n ip = dptr = PyUnicode_AS_UNICODE(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:279:7: note: expanded from macro 'PyUnicode_AS_UNICODE'\r\n PyUnicode_AsUnicode(_PyObject_CAST(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:476:11: warning: 
'_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n len = PyUnicode_GET_SIZE(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:476:11: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n len = PyUnicode_GET_SIZE(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:476:11: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n len = PyUnicode_GET_SIZE(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:481:11: warning: 'PyUnicode_FromUnicode' is deprecated [-Wdeprecated-declarations]\r\n new = PyUnicode_FromUnicode(ip, len);\r\n ^\r\n 
\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:551:1: note: 'PyUnicode_FromUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(PyObject*) PyUnicode_FromUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:1849:18: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n buffer = PyUnicode_AS_DATA(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:283:21: note: expanded from macro 'PyUnicode_AS_DATA'\r\n ((const char *)(PyUnicode_AS_UNICODE(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:279:7: note: expanded from macro 'PyUnicode_AS_UNICODE'\r\n PyUnicode_AsUnicode(_PyObject_CAST(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:1850:18: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n buflen = PyUnicode_GET_DATA_SIZE(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:1850:18: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n buflen = PyUnicode_GET_DATA_SIZE(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 
'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/scalartypes.c.src:1850:18: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n buflen = PyUnicode_GET_DATA_SIZE(self);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'\r\n (PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n 5 warnings generated.\r\n clang: numpy\/core\/src\/multiarray\/typeinfo.c\r\n clang: numpy\/core\/src\/multiarray\/refcount.c\r\n clang: numpy\/core\/src\/multiarray\/usertypes.c\r\n clang: numpy\/core\/src\/multiarray\/multiarraymodule.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/lowlevel_strided_loops.c\r\n clang: numpy\/core\/src\/multiarray\/vdot.c\r\n clang: numpy\/core\/src\/umath\/umathmodule.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/matmul.c\r\n clang: numpy\/core\/src\/umath\/reduction.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/loops.c\r\n clang: numpy\/core\/src\/multiarray\/nditer_api.c\r\n 14 warnings generated.\r\n clang: numpy\/core\/src\/multiarray\/strfuncs.c\r\n numpy\/core\/src\/umath\/loops.c.src:655:18: warning: 'PyEval_CallObjectWithKeywords' is deprecated [-Wdeprecated-declarations]\r\n result = PyEval_CallObject(tocall, arglist);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/ceval.h:24:5: note: expanded from macro 'PyEval_CallObject'\r\n PyEval_CallObjectWithKeywords(callable, arg, (PyObject *)NULL)\r\n ^\r\n 
\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/ceval.h:17:1: note: 'PyEval_CallObjectWithKeywords' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyEval_CallObjectWithKeywords(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/strfuncs.c:178:13: warning: 'PyEval_CallObjectWithKeywords' is deprecated [-Wdeprecated-declarations]\r\n s = PyEval_CallObject(PyArray_ReprFunction, arglist);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/ceval.h:24:5: note: expanded from macro 'PyEval_CallObject'\r\n PyEval_CallObjectWithKeywords(callable, arg, (PyObject *)NULL)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/ceval.h:17:1: note: 'PyEval_CallObjectWithKeywords' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyEval_CallObjectWithKeywords(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/core\/src\/multiarray\/strfuncs.c:195:13: warning: 'PyEval_CallObjectWithKeywords' is deprecated [-Wdeprecated-declarations]\r\n s = PyEval_CallObject(PyArray_StrFunction, arglist);\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/ceval.h:24:5: note: expanded from macro 'PyEval_CallObject'\r\n PyEval_CallObjectWithKeywords(callable, arg, (PyObject *)NULL)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/ceval.h:17:1: note: 'PyEval_CallObjectWithKeywords' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyEval_CallObjectWithKeywords(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n 2 warnings generated.\r\n clang: numpy\/core\/src\/multiarray\/temp_elide.c\r\n clang: numpy\/core\/src\/umath\/cpuid.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/scalarmath.c\r\n clang: numpy\/core\/src\/umath\/ufunc_object.c\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'byte_long' [-Wunused-function]\r\n byte_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'ubyte_long' [-Wunused-function]\r\n ubyte_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'short_long' [-Wunused-function]\r\n short_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'ushort_long' [-Wunused-function]\r\n ushort_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'int_long' [-Wunused-function]\r\n int_long(PyObject *obj)\r\n ^\r\n 
numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'uint_long' [-Wunused-function]\r\n uint_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'long_long' [-Wunused-function]\r\n long_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'ulong_long' [-Wunused-function]\r\n ulong_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'longlong_long' [-Wunused-function]\r\n longlong_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'ulonglong_long' [-Wunused-function]\r\n ulonglong_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'half_long' [-Wunused-function]\r\n half_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'float_long' [-Wunused-function]\r\n float_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'double_long' [-Wunused-function]\r\n double_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'longdouble_long' [-Wunused-function]\r\n longdouble_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'cfloat_long' [-Wunused-function]\r\n cfloat_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'cdouble_long' [-Wunused-function]\r\n cdouble_long(PyObject *obj)\r\n ^\r\n numpy\/core\/src\/umath\/scalarmath.c.src:1449:1: warning: unused function 'clongdouble_long' [-Wunused-function]\r\n clongdouble_long(PyObject *obj)\r\n ^\r\n clang: numpy\/core\/src\/multiarray\/nditer_constr.c\r\n numpy\/core\/src\/umath\/ufunc_object.c:657:19: warning: comparison of integers of different signs: 'int' and 'size_t' (aka 'unsigned long') [-Wsign-compare]\r\n for (i = 0; i < len; i++) {\r\n ~ ^ ~~~\r\n clang: numpy\/core\/src\/umath\/override.c\r\n clang: numpy\/core\/src\/npymath\/npy_math.c\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath\/ieee754.c\r\n numpy\/core\/src\/umath\/loops.c.src:2527:22: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp n = dimensions[0];\r\n ^~~~~~~~~~\r\n numpy\/core\/src\/umath\/loops.c.src:2526:29: note: silence by adding parentheses to mark code as explicitly dead\r\n if (IS_BINARY_REDUCE && 0) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/umath\/loops.c.src:2527:22: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp n = dimensions[0];\r\n ^~~~~~~~~~\r\n numpy\/core\/src\/umath\/loops.c.src:2526:29: note: silence by adding parentheses to mark code as explicitly dead\r\n if (IS_BINARY_REDUCE && 0) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n numpy\/core\/src\/umath\/loops.c.src:2527:22: warning: code will never be executed [-Wunreachable-code]\r\n npy_intp n = dimensions[0];\r\n ^~~~~~~~~~\r\n numpy\/core\/src\/umath\/loops.c.src:2526:29: note: silence by adding parentheses to mark code as explicitly dead\r\n if (IS_BINARY_REDUCE && 0) {\r\n ^\r\n \/* DISABLES CODE *\/ ( )\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath\/npy_math_complex.c\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:48:33: warning: unused variable 'tiny' [-Wunused-const-variable]\r\n static const volatile npy_float tiny = 3.9443045e-31f;\r\n 
^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:67:25: warning: unused variable 'c_halff' [-Wunused-const-variable]\r\n static const npy_cfloat c_halff = {0.5F, 0.0};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:68:25: warning: unused variable 'c_if' [-Wunused-const-variable]\r\n static const npy_cfloat c_if = {0.0, 1.0F};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:69:25: warning: unused variable 'c_ihalff' [-Wunused-const-variable]\r\n static const npy_cfloat c_ihalff = {0.0, 0.5F};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:79:1: warning: unused function 'caddf' [-Wunused-function]\r\n caddf(npy_cfloat a, npy_cfloat b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:87:1: warning: unused function 'csubf' [-Wunused-function]\r\n csubf(npy_cfloat a, npy_cfloat b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:137:1: warning: unused function 'cnegf' [-Wunused-function]\r\n cnegf(npy_cfloat a)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:144:1: warning: unused function 'cmulif' [-Wunused-function]\r\n cmulif(npy_cfloat a)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:67:26: warning: unused variable 'c_half' [-Wunused-const-variable]\r\n static const npy_cdouble c_half = {0.5, 0.0};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:68:26: warning: unused variable 'c_i' [-Wunused-const-variable]\r\n static const npy_cdouble c_i = {0.0, 1.0};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:69:26: warning: unused variable 'c_ihalf' [-Wunused-const-variable]\r\n static const npy_cdouble c_ihalf = {0.0, 0.5};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:79:1: warning: unused function 'cadd' [-Wunused-function]\r\n cadd(npy_cdouble a, npy_cdouble b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:87:1: warning: unused function 'csub' [-Wunused-function]\r\n csub(npy_cdouble a, npy_cdouble b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:137:1: warning: unused function 'cneg' [-Wunused-function]\r\n cneg(npy_cdouble a)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:144:1: warning: unused function 'cmuli' [-Wunused-function]\r\n cmuli(npy_cdouble a)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:67:30: warning: unused variable 'c_halfl' [-Wunused-const-variable]\r\n static const npy_clongdouble c_halfl = {0.5L, 0.0};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:68:30: warning: unused variable 'c_il' [-Wunused-const-variable]\r\n static const npy_clongdouble c_il = {0.0, 1.0L};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:69:30: warning: unused variable 'c_ihalfl' [-Wunused-const-variable]\r\n static const npy_clongdouble c_ihalfl = {0.0, 0.5L};\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:79:1: warning: unused function 'caddl' [-Wunused-function]\r\n caddl(npy_clongdouble a, npy_clongdouble b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:87:1: warning: unused function 'csubl' [-Wunused-function]\r\n csubl(npy_clongdouble a, npy_clongdouble b)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:137:1: warning: unused function 'cnegl' [-Wunused-function]\r\n cnegl(npy_clongdouble a)\r\n ^\r\n numpy\/core\/src\/npymath\/npy_math_complex.c.src:144:1: warning: unused function 'cmulil' [-Wunused-function]\r\n cmulil(npy_clongdouble a)\r\n ^\r\n 22 warnings generated.\r\n clang: numpy\/core\/src\/common\/mem_overlap.c\r\n clang: 
numpy\/core\/src\/npymath\/halffloat.c\r\n clang: numpy\/core\/src\/common\/array_assign.c\r\n clang: numpy\/core\/src\/common\/ufunc_override.c\r\n clang: numpy\/core\/src\/common\/npy_longdouble.c\r\n clang: numpy\/core\/src\/common\/numpyos.c\r\n clang: numpy\/core\/src\/common\/ucsnarrow.c\r\n 1 warning generated.\r\n clang: numpy\/core\/src\/umath\/extobj.c\r\n numpy\/core\/src\/common\/ucsnarrow.c:139:34: warning: 'PyUnicode_FromUnicode' is deprecated [-Wdeprecated-declarations]\r\n ret = (PyUnicodeObject *)PyUnicode_FromUnicode((Py_UNICODE*)buf,\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:551:1: note: 'PyUnicode_FromUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(PyObject*) PyUnicode_FromUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n 1 warning generated.\r\n clang: numpy\/core\/src\/common\/python_xerbla.c\r\n clang: numpy\/core\/src\/common\/cblasfuncs.c\r\n clang: \/private\/var\/folders\/fz\/0j719tys48x7jlnjnwc69smr0000gn\/T\/pip-install-ufzck51l\/numpy_b0e8a3953a1d4b46801f12bcea55536e\/numpy\/_build_utils\/src\/apple_sgemv_fix.c\r\n In file included from \/private\/var\/folders\/fz\/0j719tys48x7jlnjnwc69smr0000gn\/T\/pip-install-ufzck51l\/numpy_b0e8a3953a1d4b46801f12bcea55536e\/numpy\/_build_utils\/src\/apple_sgemv_fix.c:26:\r\n In file included from numpy\/core\/include\/numpy\/arrayobject.h:4:\r\n In file included from numpy\/core\/include\/numpy\/ndarrayobject.h:21:\r\n build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy\/__multiarray_api.h:1463:1: warning: unused function '_import_array' [-Wunused-function]\r\n _import_array(void)\r\n ^\r\n 1 warning generated.\r\n 17 warnings generated.\r\n clang: numpy\/core\/src\/umath\/ufunc_type_resolution.c\r\n 4 warnings generated.\r\n 4 warnings generated.\r\n clang -bundle -undefined dynamic_lookup -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/alloc.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/arrayobject.o build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/arraytypes.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/array_assign_scalar.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/array_assign_array.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/buffer.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/calculation.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/compiled_base.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/common.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/convert.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/convert_datatype.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/conversion_utils.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/ctors.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/datetime.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/datetime_strings.o 
build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/datetime_busday.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/datetime_busdaycal.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/descriptor.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/dragon4.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/dtype_transfer.o build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/einsum.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/flagsobject.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/getset.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/hashdescr.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/item_selection.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/iterators.o build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/lowlevel_strided_loops.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/mapping.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/methods.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/multiarraymodule.o build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/nditer_templ.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/nditer_api.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/nditer_constr.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/nditer_pywrap.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/number.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/refcount.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/sequence.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/shape.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/scalarapi.o build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/scalartypes.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/strfuncs.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/temp_elide.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/typeinfo.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/usertypes.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/multiarray\/vdot.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/umathmodule.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/reduction.o build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/loops.o build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/matmul.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/ufunc_object.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/extobj.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/cpuid.o build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/scalarmath.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/ufunc_type_resolution.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/override.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath\/npy_math.o 
build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath\/ieee754.o build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath\/npy_math_complex.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath\/halffloat.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\/array_assign.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\/mem_overlap.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\/npy_longdouble.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\/ucsnarrow.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\/ufunc_override.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\/numpyos.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\/cblasfuncs.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common\/python_xerbla.o build\/temp.macosx-10.15-x86_64-3.9\/private\/var\/folders\/fz\/0j719tys48x7jlnjnwc69smr0000gn\/T\/pip-install-ufzck51l\/numpy_b0e8a3953a1d4b46801f12bcea55536e\/numpy\/_build_utils\/src\/apple_sgemv_fix.o -L\/usr\/local\/lib -L\/usr\/local\/opt\/openssl@1.1\/lib -L\/usr\/local\/opt\/sqlite\/lib -Lbuild\/temp.macosx-10.15-x86_64-3.9 -lnpymath -lnpysort -o build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\/_multiarray_umath.cpython-39-darwin.so -Wl,-framework -Wl,Accelerate\r\n building 'numpy.core._umath_tests' extension\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_umath_tests.c\r\n clang -bundle -undefined dynamic_lookup -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_umath_tests.o -L\/usr\/local\/lib -L\/usr\/local\/opt\/openssl@1.1\/lib -L\/usr\/local\/opt\/sqlite\/lib -Lbuild\/temp.macosx-10.15-x86_64-3.9 -o build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\/_umath_tests.cpython-39-darwin.so\r\n building 'numpy.core._rational_tests' extension\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot 
\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_rational_tests.c\r\n clang -bundle -undefined dynamic_lookup -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_rational_tests.o -L\/usr\/local\/lib -L\/usr\/local\/opt\/openssl@1.1\/lib -L\/usr\/local\/opt\/sqlite\/lib -Lbuild\/temp.macosx-10.15-x86_64-3.9 -o build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\/_rational_tests.cpython-39-darwin.so\r\n building 'numpy.core._struct_ufunc_tests' extension\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_struct_ufunc_tests.c\r\n clang -bundle -undefined dynamic_lookup -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_struct_ufunc_tests.o -L\/usr\/local\/lib -L\/usr\/local\/opt\/openssl@1.1\/lib -L\/usr\/local\/opt\/sqlite\/lib 
-Lbuild\/temp.macosx-10.15-x86_64-3.9 -o build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\/_struct_ufunc_tests.cpython-39-darwin.so\r\n building 'numpy.core._operand_flag_tests' extension\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_operand_flag_tests.c\r\n clang -bundle -undefined dynamic_lookup -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/umath\/_operand_flag_tests.o -L\/usr\/local\/lib -L\/usr\/local\/opt\/openssl@1.1\/lib -L\/usr\/local\/opt\/sqlite\/lib -Lbuild\/temp.macosx-10.15-x86_64-3.9 -o build\/lib.macosx-10.15-x86_64-3.9\/numpy\/core\/_operand_flag_tests.cpython-39-darwin.so\r\n building 'numpy.fft.fftpack_lite' extension\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\/fft\r\n compile options: '-Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n clang: numpy\/fft\/fftpack_litemodule.c\r\n clang: numpy\/fft\/fftpack.c\r\n clang -bundle -undefined dynamic_lookup 
-isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk build\/temp.macosx-10.15-x86_64-3.9\/numpy\/fft\/fftpack_litemodule.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/fft\/fftpack.o -L\/usr\/local\/lib -L\/usr\/local\/opt\/openssl@1.1\/lib -L\/usr\/local\/opt\/sqlite\/lib -Lbuild\/temp.macosx-10.15-x86_64-3.9 -o build\/lib.macosx-10.15-x86_64-3.9\/numpy\/fft\/fftpack_lite.cpython-39-darwin.so\r\n building 'numpy.linalg.lapack_lite' extension\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\/linalg\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\/linalg\/lapack_lite\r\n compile options: '-DNO_ATLAS_INFO=3 -DHAVE_CBLAS -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n extra options: '-msse3 -I\/System\/Library\/Frameworks\/vecLib.framework\/Headers'\r\n clang: numpy\/linalg\/lapack_litemodule.c\r\n clang: numpy\/linalg\/lapack_lite\/python_xerbla.c\r\n clang -bundle -undefined dynamic_lookup -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk build\/temp.macosx-10.15-x86_64-3.9\/numpy\/linalg\/lapack_litemodule.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/linalg\/lapack_lite\/python_xerbla.o -L\/usr\/local\/lib -L\/usr\/local\/opt\/openssl@1.1\/lib -L\/usr\/local\/opt\/sqlite\/lib -Lbuild\/temp.macosx-10.15-x86_64-3.9 -o build\/lib.macosx-10.15-x86_64-3.9\/numpy\/linalg\/lapack_lite.cpython-39-darwin.so -Wl,-framework -Wl,Accelerate\r\n building 'numpy.linalg._umath_linalg' extension\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n creating build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/linalg\r\n compile options: '-DNO_ATLAS_INFO=3 -DHAVE_CBLAS -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include 
-I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n extra options: '-msse3 -I\/System\/Library\/Frameworks\/vecLib.framework\/Headers'\r\n clang: build\/src.macosx-10.15-x86_64-3.9\/numpy\/linalg\/umath_linalg.c\r\n numpy\/linalg\/umath_linalg.c.src:735:32: warning: unknown warning group '-Wmaybe-uninitialized', ignored [-Wunknown-warning-option]\r\n #pragma GCC diagnostic ignored \"-Wmaybe-uninitialized\"\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:541:1: warning: unused function 'dump_ufunc_object' [-Wunused-function]\r\n dump_ufunc_object(PyUFuncObject* ufunc)\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:566:1: warning: unused function 'dump_linearize_data' [-Wunused-function]\r\n dump_linearize_data(const char* name, const LINEARIZE_DATA_t* params)\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:602:1: warning: unused function 'dump_FLOAT_matrix' [-Wunused-function]\r\n dump_FLOAT_matrix(const char* name,\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:602:1: warning: unused function 'dump_DOUBLE_matrix' [-Wunused-function]\r\n dump_DOUBLE_matrix(const char* name,\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:602:1: warning: unused function 'dump_CFLOAT_matrix' [-Wunused-function]\r\n dump_CFLOAT_matrix(const char* name,\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:602:1: warning: unused function 'dump_CDOUBLE_matrix' [-Wunused-function]\r\n dump_CDOUBLE_matrix(const char* name,\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:865:1: warning: unused function 'zero_FLOAT_matrix' [-Wunused-function]\r\n zero_FLOAT_matrix(void *dst_in, const LINEARIZE_DATA_t* data)\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:865:1: warning: unused function 'zero_DOUBLE_matrix' [-Wunused-function]\r\n zero_DOUBLE_matrix(void *dst_in, const LINEARIZE_DATA_t* data)\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:865:1: warning: unused function 'zero_CFLOAT_matrix' [-Wunused-function]\r\n zero_CFLOAT_matrix(void *dst_in, const LINEARIZE_DATA_t* data)\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:865:1: warning: unused function 'zero_CDOUBLE_matrix' [-Wunused-function]\r\n zero_CDOUBLE_matrix(void *dst_in, const LINEARIZE_DATA_t* data)\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:1862:1: warning: unused function 'dump_geev_params' [-Wunused-function]\r\n dump_geev_params(const char *name, GEEV_PARAMS_t* params)\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:2132:1: warning: unused function 'init_cgeev' [-Wunused-function]\r\n init_cgeev(GEEV_PARAMS_t* params,\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:2213:1: warning: unused function 'process_cgeev_results' [-Wunused-function]\r\n process_cgeev_results(GEEV_PARAMS_t *NPY_UNUSED(params))\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:2376:1: warning: unused function 'dump_gesdd_params' [-Wunused-function]\r\n dump_gesdd_params(const char *name,\r\n ^\r\n numpy\/linalg\/umath_linalg.c.src:2864:1: warning: unused function 'dump_gelsd_params' [-Wunused-function]\r\n dump_gelsd_params(const char *name,\r\n ^\r\n 16 warnings generated.\r\n clang -bundle -undefined dynamic_lookup -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk 
build\/temp.macosx-10.15-x86_64-3.9\/build\/src.macosx-10.15-x86_64-3.9\/numpy\/linalg\/umath_linalg.o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/linalg\/lapack_lite\/python_xerbla.o -L\/usr\/local\/lib -L\/usr\/local\/opt\/openssl@1.1\/lib -L\/usr\/local\/opt\/sqlite\/lib -Lbuild\/temp.macosx-10.15-x86_64-3.9 -lnpymath -o build\/lib.macosx-10.15-x86_64-3.9\/numpy\/linalg\/_umath_linalg.cpython-39-darwin.so -Wl,-framework -Wl,Accelerate\r\n building 'numpy.random.mtrand' extension\r\n compiling C sources\r\n C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers\r\n \r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\/random\r\n creating build\/temp.macosx-10.15-x86_64-3.9\/numpy\/random\/mtrand\r\n compile options: '-D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c'\r\n clang: numpy\/random\/mtrand\/mtrand.c\r\n clang: numpy\/random\/mtrand\/initarray.cclang: numpy\/random\/mtrand\/randomkit.c\r\n \r\n clang: numpy\/random\/mtrand\/distributions.c\r\n numpy\/random\/mtrand\/mtrand.c:40400:34: error: no member named 'tp_print' in 'struct _typeobject'\r\n __pyx_type_6mtrand_RandomState.tp_print = 0;\r\n ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ^\r\n numpy\/random\/mtrand\/mtrand.c:42673:22: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 
1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/random\/mtrand\/mtrand.c:42673:22: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/random\/mtrand\/mtrand.c:42673:22: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/random\/mtrand\/mtrand.c:42673:52: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 
1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/random\/mtrand\/mtrand.c:42673:52: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/random\/mtrand\/mtrand.c:42673:52: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/random\/mtrand\/mtrand.c:42689:26: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/random\/mtrand\/mtrand.c:42689:26: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/random\/mtrand\/mtrand.c:42689:26: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/random\/mtrand\/mtrand.c:42689:59: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op) : \\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/random\/mtrand\/mtrand.c:42689:59: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n ((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\\\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n numpy\/random\/mtrand\/mtrand.c:42689:59: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]\r\n (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 :\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'\r\n PyUnicode_WSTR_LENGTH(op)))\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'\r\n #define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/cpython\/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here\r\n Py_DEPRECATED(3.3)\r\n ^\r\n \/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9\/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'\r\n #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))\r\n ^\r\n 12 warnings and 1 error generated.\r\n error: Command \"clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot \/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/usr\/include -I\/Library\/Developer\/CommandLineTools\/SDKs\/MacOSX10.15.sdk\/System\/Library\/Frameworks\/Tk.framework\/Versions\/8.5\/Headers -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy\/core\/include -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/include\/numpy -Inumpy\/core\/src\/common -Inumpy\/core\/src -Inumpy\/core -Inumpy\/core\/src\/npymath -Inumpy\/core\/src\/multiarray -Inumpy\/core\/src\/umath -Inumpy\/core\/src\/npysort -I\/usr\/local\/include -I\/usr\/local\/opt\/openssl@1.1\/include -I\/usr\/local\/opt\/sqlite\/include -I\/Users\/destiny\/Downloads\/env\/include -I\/usr\/local\/Cellar\/python@3.9\/3.9.0_1\/Frameworks\/Python.framework\/Versions\/3.9\/include\/python3.9 -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/common -Ibuild\/src.macosx-10.15-x86_64-3.9\/numpy\/core\/src\/npymath -c numpy\/random\/mtrand\/mtrand.c -o build\/temp.macosx-10.15-x86_64-3.9\/numpy\/random\/mtrand\/mtrand.o -MMD -MF build\/temp.macosx-10.15-x86_64-3.9\/numpy\/random\/mtrand\/mtrand.o.d\" failed with exit status 1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1696\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1696\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1695","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1695\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1695\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1695\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1695","id":780971987,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUwNzc1OTU4","number":1695,"title":"fix ner_tag bugs in 
thainer","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-07T02:12:33Z","updated_at":"2021-01-07T14:43:45Z","closed_at":"2021-01-07T14:43:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1695","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1695","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1695.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1695.patch","merged_at":"2021-01-07T14:43:28Z"},"body":"fix bug that results in `ner_tag` always equal to 'O'.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1695\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1695\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1694","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1694\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1694\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1694\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1694","id":780429080,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUwMzI0Mjcx","number":1694,"title":"Add 
OSCAR","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-01-06T10:21:08Z","updated_at":"2021-01-25T09:10:33Z","closed_at":"2021-01-25T09:10:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1694","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1694","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1694.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1694.patch","merged_at":"2021-01-25T09:10:32Z"},"body":"Continuation of #348 \r\nThe files have been moved to S3 and only the unshuffled version is available.\r\nBoth original and deduplicated versions of each language are available.\r\n\r\nExample of usage:\r\n```python\r\nfrom datasets import load_dataset\r\n\r\noscar_dedup_en = load_dataset(\"oscar\", \"unshuffled_deduplicated_en\", split=\"train\")\r\noscar_orig_fr = load_dataset(\"oscar\", \"unshuffled_original_fr\", split=\"train\")\r\n```\r\n\r\ncc @pjox @jonatasgrosman \r\n\r\n-------------\r\n\r\nTo make the metadata generation work in parallel I did a few changes in the `datasets-cli test` command to add the `num_proc` and `proc_rank` arguments. 
This way you can run multiple processes for the metadata computation.\r\n\r\n```\r\ndatasets-cli test .\/datasets\/oscar --save_infos --all_configs --num_proc 4 --proc_rank 0 --clear_cache --cache_dir tmp0\r\n```\r\n\r\n-------------\r\n\r\nToDo: add the dummy_data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1694\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":2,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1694\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1693","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1693\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1693\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1693\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1693","id":780268595,"node_id":"MDExOlB1bGxSZXF1ZXN0NTUwMTc3MDEx","number":1693,"title":"Fix reuters metadata parsing errors","user":{"login":"jbragg","id":2238344,"node_id":"MDQ6VXNlcjIyMzgzNDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2238344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jbragg","html_url":"https:\/\/github.com\/jbragg","followers_url":"https:\/\/api.github.com\/users\/jbragg\/followers","following_url":"https:\/\/api.github.com\/users\/jbragg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jbragg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jbragg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jbragg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jbragg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jbragg\/repos","events_url":"https:\/\/api.github.com\/users\/jbragg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jbragg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-06T08:26:03Z","updated_at":"2021-01-07T23:53:47Z","closed_at":"2021-01-07T14:01:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1693","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1693","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1693.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1693.patch","merged_at":"2021-01-07T14:01:22Z"},"body":"Was missing the last entry in each metadata category","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1693\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1693\/timeline","performed_via_github_app":null} 
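Illustration for the OSCAR PR (#1694) above: its body describes running several `datasets-cli test` workers in parallel via the new `num_proc` and `proc_rank` arguments, and quotes one such command. The following is only a minimal sketch of how one might launch all ranks from Python, assuming the flags behave exactly as in the quoted command; the per-rank cache directories (`tmp0`…`tmp3`) follow the PR's example and are otherwise arbitrary.

```python
# Hedged sketch: launch one `datasets-cli test` worker per rank, mirroring the
# command quoted in PR #1694. Flag behaviour is assumed from that command only.
import subprocess

NUM_PROC = 4  # must match the --num_proc value passed to every worker

procs = []
for rank in range(NUM_PROC):
    cmd = [
        "datasets-cli", "test", "./datasets/oscar",
        "--save_infos", "--all_configs",
        "--num_proc", str(NUM_PROC),
        "--proc_rank", str(rank),
        "--clear_cache",
        "--cache_dir", f"tmp{rank}",  # separate cache per worker, as in the PR example
    ]
    procs.append(subprocess.Popen(cmd))

# Wait for every worker to finish before inspecting the generated metadata.
for p in procs:
    p.wait()
```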
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1691","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1691\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1691\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1691\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1691","id":779882271,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ5ODE3NTM0","number":1691,"title":"Updated HuggingFace Datasets README (fix typos)","user":{"login":"8bitmp3","id":19637339,"node_id":"MDQ6VXNlcjE5NjM3MzM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19637339?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/8bitmp3","html_url":"https:\/\/github.com\/8bitmp3","followers_url":"https:\/\/api.github.com\/users\/8bitmp3\/followers","following_url":"https:\/\/api.github.com\/users\/8bitmp3\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/8bitmp3\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/8bitmp3\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/8bitmp3\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/8bitmp3\/orgs","repos_url":"https:\/\/api.github.com\/users\/8bitmp3\/repos","events_url":"https:\/\/api.github.com\/users\/8bitmp3\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/8bitmp3\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-06T02:14:38Z","updated_at":"2021-01-16T23:30:47Z","closed_at":"2021-01-07T10:06:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1691","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1691","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1691.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1691.patch","merged_at":"2021-01-07T10:06:32Z"},"body":"Awesome work on \ud83e\udd17 Datasets. I found a couple of small typos in the README. 
Hope this helps.\r\n\r\n\r\n\r\n![](https:\/\/emojipedia-us.s3.dualstack.us-west-1.amazonaws.com\/thumbs\/160\/google\/56\/hugging-face_1f917.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1691\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1691\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1690","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1690\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1690\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1690\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1690","id":779441631,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ5NDEwOTgw","number":1690,"title":"Fast start up","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-05T19:07:53Z","updated_at":"2021-01-06T14:20:59Z","closed_at":"2021-01-06T14:20:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1690","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1690","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1690.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1690.patch","merged_at":"2021-01-06T14:20:58Z"},"body":"Currently if optional dependencies such as tensorflow, torch, apache_beam, faiss and elasticsearch are installed, then it takes a long time to do `import datasets` since it imports all of these heavy dependencies.\r\n\r\nTo make a fast start up for `datasets` I changed that so that they are not imported when `datasets` is being imported. On my side it changed the import time of `datasets` from 5sec to 0.5sec, which is enjoyable.\r\n\r\nTo be able to check if optional dependencies are available without importing them I'm using `importlib_metadata`, which is part of the standard lib in python>=3.8 and was backported. 
The difference with `importlib` is that it also enables to get the versions of the libraries without importing them.\r\nI added this dependency in `setup.py`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1690\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":3,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1690\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1689","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1689\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1689\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1689\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1689","id":779107313,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ5MTEwMDgw","number":1689,"title":"Fix ade_corpus_v2 config names","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-05T14:33:28Z","updated_at":"2021-01-05T14:55:09Z","closed_at":"2021-01-05T14:55:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1689","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1689","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1689.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1689.patch","merged_at":"2021-01-05T14:55:08Z"},"body":"There are currently some typos in the config names of the `ade_corpus_v2` dataset, I fixed them:\r\n\r\n- Ade_corpos_v2_classificaion -> Ade_corpus_v2_classification\r\n- Ade_corpos_v2_drug_ade_relation -> Ade_corpus_v2_drug_ade_relation\r\n- Ade_corpos_v2_drug_dosage_relation -> Ade_corpus_v2_drug_dosage_relation","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1689\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1689\/timeline","performed_via_github_app":null} 
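Illustration for the fast start up PR (#1690) above: it defers the heavy optional imports (torch, tensorflow, apache_beam, faiss, elasticsearch) and instead checks their availability and versions through `importlib_metadata`. A minimal sketch of that idea, assuming only the standard `importlib.metadata` / backported `importlib_metadata` API; the package names checked here are illustrative, not the library's actual detection code.

```python
# Minimal sketch of checking an optional dependency without importing it,
# in the spirit of PR #1690: the heavy package is never imported, only its
# installation metadata is consulted.
try:
    import importlib.metadata as importlib_metadata  # standard lib on Python >= 3.8
except ImportError:
    import importlib_metadata  # backport, the dependency added to setup.py in the PR


def optional_dependency_version(name: str):
    """Return the installed version of `name`, or None if it is not installed."""
    try:
        return importlib_metadata.version(name)
    except importlib_metadata.PackageNotFoundError:
        return None


TORCH_AVAILABLE = optional_dependency_version("torch") is not None
TF_AVAILABLE = optional_dependency_version("tensorflow") is not None
```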
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1688","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1688\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1688\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1688\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1688","id":779029685,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ5MDM5ODg0","number":1688,"title":"Fix DaNE last example","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-05T13:29:37Z","updated_at":"2021-01-05T14:00:15Z","closed_at":"2021-01-05T14:00:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1688","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1688","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1688.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1688.patch","merged_at":"2021-01-05T14:00:13Z"},"body":"The last example from the DaNE dataset is empty.\r\n\r\nFix #1686 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1688\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1688\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1687","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1687\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1687\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1687\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1687","id":779004894,"node_id":"MDU6SXNzdWU3NzkwMDQ4OTQ=","number":1687,"title":"Question: Shouldn't .info be a part of 
DatasetDict?","user":{"login":"KennethEnevoldsen","id":23721977,"node_id":"MDQ6VXNlcjIzNzIxOTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23721977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KennethEnevoldsen","html_url":"https:\/\/github.com\/KennethEnevoldsen","followers_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/followers","following_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/repos","events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-05T13:08:41Z","updated_at":"2021-01-07T10:18:06Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, only `Dataset` contains the .info or .features, but as many datasets contains standard splits (train, test) and thus the underlying information is the same (or at least should be) across the datasets. \r\n\r\nFor instance:\r\n```\r\n>>> ds = datasets.load_dataset(\"conll2002\", \"es\")\r\n>>> ds.info\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\nAttributeError: 'DatasetDict' object has no attribute 'info'\r\n```\r\n\r\nI could imagine that this wouldn't work for datasets dicts which hold entirely different datasets (multimodal datasets), but it seems odd that splits of the same dataset is treated the same as what is essentially different datasets. \r\n\r\nIntuitively it would also make sense that if a dataset is supplied via. 
the load_dataset that is have a common .info which covers the entire dataset.\r\n\r\nIt is entirely possible that I am missing another perspective","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1687\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1687\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1686","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1686\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1686\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1686\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1686","id":778921684,"node_id":"MDU6SXNzdWU3Nzg5MjE2ODQ=","number":1686,"title":"Dataset Error: DaNE contains empty samples at the end","user":{"login":"KennethEnevoldsen","id":23721977,"node_id":"MDQ6VXNlcjIzNzIxOTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23721977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KennethEnevoldsen","html_url":"https:\/\/github.com\/KennethEnevoldsen","followers_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/followers","following_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/repos","events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-01-05T11:54:26Z","updated_at":"2021-01-05T14:01:09Z","closed_at":"2021-01-05T14:00:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The dataset DaNE, contains empty samples at the end. 
It is naturally easy to remove using a filter but should probably not be there, to begin with as it can cause errors.\r\n\r\n```python\r\n>>> import datasets\r\n[...]\r\n>>> dataset = datasets.load_dataset(\"dane\")\r\n[...]\r\n>>> dataset[\"test\"][-1]\r\n{'dep_ids': [], 'dep_labels': [], 'lemmas': [], 'morph_tags': [], 'ner_tags': [], 'pos_tags': [], 'sent_id': '', 'text': '', 'tok_ids': [], 'tokens': []}\r\n>>> dataset[\"train\"][-1]\r\n{'dep_ids': [], 'dep_labels': [], 'lemmas': [], 'morph_tags': [], 'ner_tags': [], 'pos_tags': [], 'sent_id': '', 'text': '', 'tok_ids': [], 'tokens': []}\r\n```\r\n\r\nBest,\r\nKenneth","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1686\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1686\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1685","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1685\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1685\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1685\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1685","id":778914431,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ4OTM1MzY2","number":1685,"title":"Update README.md of covid-tweets-japanese","user":{"login":"forest1988","id":2755894,"node_id":"MDQ6VXNlcjI3NTU4OTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2755894?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/forest1988","html_url":"https:\/\/github.com\/forest1988","followers_url":"https:\/\/api.github.com\/users\/forest1988\/followers","following_url":"https:\/\/api.github.com\/users\/forest1988\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/forest1988\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/forest1988\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/forest1988\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/forest1988\/orgs","repos_url":"https:\/\/api.github.com\/users\/forest1988\/repos","events_url":"https:\/\/api.github.com\/users\/forest1988\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/forest1988\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-05T11:47:27Z","updated_at":"2021-01-06T10:27:12Z","closed_at":"2021-01-06T09:31:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1685","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1685","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1685.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1685.patch","merged_at":"2021-01-06T09:31:10Z"},"body":"Update README.md of covid-tweets-japanese added by PR https:\/\/github.com\/huggingface\/datasets\/pull\/1367 and https:\/\/github.com\/huggingface\/datasets\/pull\/1402.\r\n\r\n- Update \"Data Splits\" to be more precise that no information is provided for now.\r\n - old: [More Information Needed]\r\n - new: No 
information about data splits is provided for now.\r\n\r\n- The automatic generation of links seemed not working properly, so I added a space before and after the URL to make the links work correctly.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1685\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1685\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1684","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1684\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1684\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1684\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1684","id":778356196,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ4NDU3NDY1","number":1684,"title":"Add CANER Corpus","user":{"login":"KMFODA","id":35491698,"node_id":"MDQ6VXNlcjM1NDkxNjk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35491698?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KMFODA","html_url":"https:\/\/github.com\/KMFODA","followers_url":"https:\/\/api.github.com\/users\/KMFODA\/followers","following_url":"https:\/\/api.github.com\/users\/KMFODA\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KMFODA\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KMFODA\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KMFODA\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KMFODA\/orgs","repos_url":"https:\/\/api.github.com\/users\/KMFODA\/repos","events_url":"https:\/\/api.github.com\/users\/KMFODA\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KMFODA\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-04T20:49:11Z","updated_at":"2021-01-25T09:09:20Z","closed_at":"2021-01-25T09:09:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1684","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1684","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1684.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1684.patch","merged_at":"2021-01-25T09:09:20Z"},"body":"What does this PR do?\r\n\r\nAdds the following dataset:\r\n\r\nhttps:\/\/github.com\/RamziSalah\/Classical-Arabic-Named-Entity-Recognition-Corpus\r\n\r\nWho can review?\r\n\r\n@lhoestq","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1684\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1684\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1683","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1683\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1683\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1683\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1683","id":778287612,"node_id":"MDU6SXNzdWU3NzgyODc2MTI=","number":1683,"title":"`ArrowInvalid` occurs while running `Dataset.map()` function for DPRContext","user":{"login":"abarbosa94","id":6608232,"node_id":"MDQ6VXNlcjY2MDgyMzI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6608232?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abarbosa94","html_url":"https:\/\/github.com\/abarbosa94","followers_url":"https:\/\/api.github.com\/users\/abarbosa94\/followers","following_url":"https:\/\/api.github.com\/users\/abarbosa94\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abarbosa94\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abarbosa94\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abarbosa94\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abarbosa94\/orgs","repos_url":"https:\/\/api.github.com\/users\/abarbosa94\/repos","events_url":"https:\/\/api.github.com\/users\/abarbosa94\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abarbosa94\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-04T18:47:53Z","updated_at":"2021-01-04T19:04:45Z","closed_at":"2021-01-04T19:04:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It seems to fail the final batch ):\r\n\r\nsteps to reproduce:\r\n```\r\nfrom datasets import load_dataset\r\nfrom elasticsearch import Elasticsearch\r\nimport torch\r\nfrom transformers import file_utils, set_seed\r\nfrom transformers import DPRContextEncoder, DPRContextEncoderTokenizerFast\r\nMAX_SEQ_LENGTH = 256\r\nctx_encoder = DPRContextEncoder.from_pretrained(\"facebook\/dpr-ctx_encoder-single-nq-base\", cache_dir=\"..\/datasets\/\")\r\nctx_tokenizer = DPRContextEncoderTokenizerFast.from_pretrained(\r\n \"facebook\/dpr-ctx_encoder-single-nq-base\", \r\n cache_dir=\"..datasets\/\"\r\n)\r\n\r\ndataset = load_dataset('text', \r\n data_files='data\/raw\/ARC_Corpus.txt',\r\n cache_dir='..\/datasets')\r\n\r\ntorch.set_grad_enabled(False)\r\nds_with_embeddings = dataset.map(\r\n lambda example: {\r\n 'embeddings': ctx_encoder(\r\n **ctx_tokenizer(\r\n example[\"text\"], \r\n padding='max_length', \r\n truncation=True, \r\n max_length=MAX_SEQ_LENGTH,\r\n return_tensors=\"pt\"\r\n )\r\n )[0][0].numpy(),\r\n },\r\n batched=True,\r\n load_from_cache_file=False,\r\n batch_size=1000\r\n)\r\n```\r\nARC Corpus can be obtained from [here](https:\/\/ai2-datasets.s3-us-west-2.amazonaws.com\/arc\/ARC-V1-Feb2018.zip)\r\n\r\nAnd then the error:\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nArrowInvalid Traceback (most recent call last)\r\n in \r\n 14 batched=True,\r\n 15 load_from_cache_file=False,\r\n---> 16 batch_size=1000\r\n 17 
)\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py in map(self, function, with_indices, input_columns, batched, batch_size, remove_columns, keep_in_memory, load_from_cache_file, cache_file_names, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc)\r\n 301 num_proc=num_proc,\r\n 302 )\r\n--> 303 for k, dataset in self.items()\r\n 304 }\r\n 305 )\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py in (.0)\r\n 301 num_proc=num_proc,\r\n 302 )\r\n--> 303 for k, dataset in self.items()\r\n 304 }\r\n 305 )\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint)\r\n 1257 fn_kwargs=fn_kwargs,\r\n 1258 new_fingerprint=new_fingerprint,\r\n-> 1259 update_data=update_data,\r\n 1260 )\r\n 1261 else:\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 155 }\r\n 156 # apply actual function\r\n--> 157 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 158 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 159 # re-apply format to the output\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 161 # Call actual function\r\n 162 \r\n--> 163 out = func(self, *args, **kwargs)\r\n 164 \r\n 165 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, update_data)\r\n 1526 if update_data:\r\n 1527 batch = cast_to_python_objects(batch)\r\n-> 1528 writer.write_batch(batch)\r\n 1529 if update_data:\r\n 1530 writer.finalize() # close_stream=bool(buf_writer is None)) # We only close if we are writing in a file\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py in write_batch(self, batch_examples, writer_batch_size)\r\n 276 typed_sequence = TypedSequence(batch_examples[col], type=col_type, try_type=col_try_type)\r\n 277 typed_sequence_examples[col] = typed_sequence\r\n--> 278 pa_table = pa.Table.from_pydict(typed_sequence_examples)\r\n 279 self.write_table(pa_table)\r\n 280 \r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.from_pydict()\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.from_arrays()\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/pyarrow\/table.pxi in 
pyarrow.lib.Table.validate()\r\n\r\n~\/.cache\/pypoetry\/virtualenvs\/masters-utTTC0p8-py3.7\/lib\/python3.7\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowInvalid: Column 1 named text expected length 768 but got length 1000\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1683\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1683\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1682","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1682\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1682\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1682\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1682","id":778268156,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ4Mzg1NTk1","number":1682,"title":"Don't use xlrd for xlsx files","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-01-04T18:11:50Z","updated_at":"2021-01-04T18:13:14Z","closed_at":"2021-01-04T18:13:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1682","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1682","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1682.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1682.patch","merged_at":"2021-01-04T18:13:13Z"},"body":"Since the latest release of `xlrd` (2.0), the support for xlsx files stopped.\r\nTherefore we needed to use something else.\r\nA good alternative is `openpyxl` which has also an integration with pandas si we can still call `pd.read_excel`.\r\n\r\nI left the unused import of `openpyxl` in the dataset scripts to show users that this is a required dependency to use the scripts.\r\n\r\nI tested the different datasets using `datasets-cli test` and the tests are successful (no missing 
examples).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1682\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1682\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1681","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1681\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1681\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1681\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1681","id":777644163,"node_id":"MDU6SXNzdWU3Nzc2NDQxNjM=","number":1681,"title":"Dataset \"dane\" missing","user":{"login":"KennethEnevoldsen","id":23721977,"node_id":"MDQ6VXNlcjIzNzIxOTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23721977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KennethEnevoldsen","html_url":"https:\/\/github.com\/KennethEnevoldsen","followers_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/followers","following_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/repos","events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-01-03T14:03:03Z","updated_at":"2021-01-05T08:35:35Z","closed_at":"2021-01-05T08:35:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"the `dane` dataset appear to be missing in the latest version (1.1.3).\r\n\r\n```python\r\n>>> import datasets\r\n>>> datasets.__version__\r\n'1.1.3'\r\n>>> \"dane\" in datasets.list_datasets()\r\nTrue\r\n```\r\n\r\nAs we can see it should be present, but doesn't seem to be findable when using `load_dataset`.\r\n\r\n```python\r\n>>> datasets.load_dataset(\"dane\")\r\nTraceback (most recent call last):\r\n File \"\/home\/kenneth\/.Envs\/EDP\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 267, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/home\/kenneth\/.Envs\/EDP\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 300, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/home\/kenneth\/.Envs\/EDP\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 486, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/dane\/dane.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent 
call last):\r\n File \"\/home\/kenneth\/.Envs\/EDP\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 278, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/home\/kenneth\/.Envs\/EDP\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 300, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/home\/kenneth\/.Envs\/EDP\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 486, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/dane\/dane.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/kenneth\/.Envs\/EDP\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 588, in load_dataset\r\n module_path, hash = prepare_module(\r\n File \"\/home\/kenneth\/.Envs\/EDP\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 280, in prepare_module\r\n raise FileNotFoundError(\r\nFileNotFoundError: Couldn't find file locally at dane\/dane.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/dane\/dane.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/dane\/dane.py\r\n```\r\n\r\nThis issue might be relevant to @ophelielacroix from the Alexandra Institut whom created the data.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1681\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1681\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1680","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1680\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1680\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1680\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1680","id":777623053,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ3ODY4MjEw","number":1680,"title":"added TurkishProductReviews 
dataset","user":{"login":"basakbuluz","id":41359672,"node_id":"MDQ6VXNlcjQxMzU5Njcy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/41359672?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/basakbuluz","html_url":"https:\/\/github.com\/basakbuluz","followers_url":"https:\/\/api.github.com\/users\/basakbuluz\/followers","following_url":"https:\/\/api.github.com\/users\/basakbuluz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/basakbuluz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/basakbuluz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/basakbuluz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/basakbuluz\/orgs","repos_url":"https:\/\/api.github.com\/users\/basakbuluz\/repos","events_url":"https:\/\/api.github.com\/users\/basakbuluz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/basakbuluz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-01-03T11:52:59Z","updated_at":"2021-01-04T18:15:35Z","closed_at":"2021-01-04T18:15:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1680","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1680","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1680.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1680.patch","merged_at":"2021-01-04T18:15:35Z"},"body":"This PR added **Turkish Product Reviews Dataset contains 235.165 product reviews collected online. There are 220.284 positive, 14881 negative reviews**.\r\n\r\n- **Repository:** [turkish-text-data](https:\/\/github.com\/fthbrmnby\/turkish-text-data)\r\n- **Point of Contact:** Fatih Barmanbay - @fthbrmnby","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1680\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1680\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1679","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1679\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1679\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1679\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1679","id":777587792,"node_id":"MDU6SXNzdWU3Nzc1ODc3OTI=","number":1679,"title":"Can't import cc100 
dataset","user":{"login":"alighofrani95","id":14968123,"node_id":"MDQ6VXNlcjE0OTY4MTIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14968123?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alighofrani95","html_url":"https:\/\/github.com\/alighofrani95","followers_url":"https:\/\/api.github.com\/users\/alighofrani95\/followers","following_url":"https:\/\/api.github.com\/users\/alighofrani95\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alighofrani95\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alighofrani95\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alighofrani95\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alighofrani95\/orgs","repos_url":"https:\/\/api.github.com\/users\/alighofrani95\/repos","events_url":"https:\/\/api.github.com\/users\/alighofrani95\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alighofrani95\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-03T07:12:56Z","updated_at":"2021-01-04T18:41:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"There is some issue to import cc100 dataset.\r\n\r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"cc100\")\r\n```\r\n\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/cc100\/cc100.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nFileNotFoundError Traceback (most recent call last)\r\nFileNotFoundError: Couldn't find file at https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/cc100\/cc100.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nFileNotFoundError Traceback (most recent call last)\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, **download_kwargs)\r\n 280 raise FileNotFoundError(\r\n 281 \"Couldn't find file locally at {}, or remotely at {} or {}\".format(\r\n--> 282 combined_path, github_file_path, file_path\r\n 283 )\r\n 284 )\r\n\r\nFileNotFoundError: Couldn't find file locally at cc100\/cc100.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/cc100\/cc100.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/cc100\/cc100.py","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1679\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1679\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1678","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1678\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1678\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1678\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1678","id":777567920,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ3ODI4MTMy","number":1678,"title":"Switchboard Dialog Act Corpus added under `datasets\/swda`","user":{"login":"gmihaila","id":22454783,"node_id":"MDQ6VXNlcjIyNDU0Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22454783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gmihaila","html_url":"https:\/\/github.com\/gmihaila","followers_url":"https:\/\/api.github.com\/users\/gmihaila\/followers","following_url":"https:\/\/api.github.com\/users\/gmihaila\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gmihaila\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gmihaila\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gmihaila\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gmihaila\/orgs","repos_url":"https:\/\/api.github.com\/users\/gmihaila\/repos","events_url":"https:\/\/api.github.com\/users\/gmihaila\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gmihaila\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-01-03T03:53:41Z","updated_at":"2021-01-08T18:09:21Z","closed_at":"2021-01-05T10:06:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1678","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1678","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1678.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1678.patch","merged_at":"2021-01-05T10:06:35Z"},"body":"Switchboard Dialog Act Corpus\r\n\r\nIntro:\r\nThe Switchboard Dialog Act Corpus (SwDA) extends the Switchboard-1 Telephone Speech Corpus, Release 2,\r\nwith turn\/utterance-level dialog-act tags. The tags summarize syntactic, semantic, and pragmatic information\r\nabout the associated turn. The SwDA project was undertaken at UC Boulder in the late 1990s.\r\n\r\nDetails:\r\n[homepage](http:\/\/compprag.christopherpotts.net\/swda.html)\r\n[repo](https:\/\/github.com\/NathanDuran\/Switchboard-Corpus\/raw\/master\/swda_data\/)\r\n\r\nI believe this is an important dataset to have since there is no dataset related to dialogue act added.\r\n\r\nI didn't find any formatting for pull request. I hope all this information is enough.\r\n\r\nFor any support please contact me. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1678\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1678\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1677","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1677\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1677\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1677\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1677","id":777553383,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ3ODE3ODI1","number":1677,"title":"Switchboard Dialog Act Corpus added under `datasets\/swda`","user":{"login":"gmihaila","id":22454783,"node_id":"MDQ6VXNlcjIyNDU0Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22454783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gmihaila","html_url":"https:\/\/github.com\/gmihaila","followers_url":"https:\/\/api.github.com\/users\/gmihaila\/followers","following_url":"https:\/\/api.github.com\/users\/gmihaila\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gmihaila\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gmihaila\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gmihaila\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gmihaila\/orgs","repos_url":"https:\/\/api.github.com\/users\/gmihaila\/repos","events_url":"https:\/\/api.github.com\/users\/gmihaila\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gmihaila\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-01-03T01:16:42Z","updated_at":"2021-01-03T02:55:57Z","closed_at":"2021-01-03T02:55:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1677","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1677","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1677.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1677.patch","merged_at":null},"body":"Pleased to announced that I added my first dataset **Switchboard Dialog Act Corpus**.\r\n\r\n\r\nI think this is an important datasets to be added since it is the only one related to dialogue act classification. \r\n\r\nHope the pull request is ok. Wasn't able to see any special formatting for the pull request form.\r\n\r\n\r\nThe Switchboard Dialog Act Corpus (SwDA) extends the Switchboard-1 Telephone Speech Corpus, Release 2,\r\nwith turn\/utterance-level dialog-act tags. The tags summarize syntactic, semantic, and pragmatic information\r\nabout the associated turn. 
The SwDA project was undertaken at UC Boulder in the late 1990s.\r\n\r\n\r\n[webpage](http:\/\/compprag.christopherpotts.net\/swda.html)\r\n\r\n[repo](https:\/\/github.com\/NathanDuran\/Switchboard-Corpus\/raw\/master\/swda_data\/)\r\n\r\nPlease contact me for any support!\r\n\r\nAll tests passed and followed all steps in the contribution guide!\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1677\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1677\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1676","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1676\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1676\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1676\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1676","id":777477645,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ3NzY1OTY3","number":1676,"title":"new version of Ted Talks IWSLT (WIT3)","user":{"login":"skyprince999","id":9033954,"node_id":"MDQ6VXNlcjkwMzM5NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9033954?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/skyprince999","html_url":"https:\/\/github.com\/skyprince999","followers_url":"https:\/\/api.github.com\/users\/skyprince999\/followers","following_url":"https:\/\/api.github.com\/users\/skyprince999\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/skyprince999\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/skyprince999\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/skyprince999\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/skyprince999\/orgs","repos_url":"https:\/\/api.github.com\/users\/skyprince999\/repos","events_url":"https:\/\/api.github.com\/users\/skyprince999\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/skyprince999\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-01-02T15:30:03Z","updated_at":"2021-01-14T10:10:19Z","closed_at":"2021-01-14T10:10:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1676","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1676","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1676.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1676.patch","merged_at":"2021-01-14T10:10:19Z"},"body":"In the previous iteration #1608 I had used language pairs. Which created 21,582 configs (109*108) !!! \r\n\r\nNow, TED talks in _each language_ is a separate config. So it's more cleaner with _just 109 configs_ (one for each language). Dummy files were created manually. \r\n\r\nLocally I was able to clear the `python datasets-cli test datasets\/......` . Which created the `dataset_info.json` file . The test for the dummy files was also cleared. 
However couldn't figure out how to specify the local data folder for the real dataset\r\n\r\n\r\n**Note: that this requires manual download of the dataset.** \r\n**Note2: The high number of _Files changed (112)_ is because of the large number of dummy files\/configs!**","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1676\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1676\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1675","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1675\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1675\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1675\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1675","id":777367320,"node_id":"MDU6SXNzdWU3NzczNjczMjA=","number":1675,"title":"Add the 800GB Pile dataset?","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2021-01-01T22:58:12Z","updated_at":"2021-12-01T15:29:07Z","closed_at":"2021-12-01T15:29:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** The Pile\r\n- **Description:** The Pile is a 825 GiB diverse, open source language modelling data set that consists of 22 smaller, high-quality datasets combined together. See [here](https:\/\/twitter.com\/nabla_theta\/status\/1345130408170541056?s=20) for the Twitter announcement\r\n- **Paper:** https:\/\/pile.eleuther.ai\/paper.pdf\r\n- **Data:** https:\/\/pile.eleuther.ai\/\r\n- **Motivation:** Enables hardcore (GPT-3 scale!) language modelling\r\n\r\n## Remarks\r\nGiven the extreme size of this dataset, I'm not sure how feasible this will be to include in `datasets` \ud83e\udd2f . 
I'm also unsure how many `datasets` users are pretraining LMs, so the usage of this dataset may not warrant the effort to integrate it.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1675\/reactions","total_count":12,"+1":4,"-1":0,"laugh":0,"hooray":0,"confused":1,"heart":0,"rocket":5,"eyes":2},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1675\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1674","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1674\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1674\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1674\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1674","id":777321840,"node_id":"MDU6SXNzdWU3NzczMjE4NDA=","number":1674,"title":"dutch_social can't be loaded","user":{"login":"koenvandenberge","id":10134844,"node_id":"MDQ6VXNlcjEwMTM0ODQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10134844?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/koenvandenberge","html_url":"https:\/\/github.com\/koenvandenberge","followers_url":"https:\/\/api.github.com\/users\/koenvandenberge\/followers","following_url":"https:\/\/api.github.com\/users\/koenvandenberge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/koenvandenberge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/koenvandenberge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/koenvandenberge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/koenvandenberge\/orgs","repos_url":"https:\/\/api.github.com\/users\/koenvandenberge\/repos","events_url":"https:\/\/api.github.com\/users\/koenvandenberge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/koenvandenberge\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-01-01T17:37:08Z","updated_at":"2021-01-05T10:17:01Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi all,\r\n\r\nI'm trying to import the `dutch_social` dataset described [here](https:\/\/huggingface.co\/datasets\/dutch_social).\r\n\r\nHowever, the code that should load the data doesn't seem to be working, in particular because the corresponding files can't be found at the provided links.\r\n\r\n```\r\n(base) Koens-MacBook-Pro:~ koenvandenberge$ python\r\nPython 3.7.4 (default, Aug 13 2019, 15:17:50) \r\n[Clang 4.0.1 (tags\/RELEASE_401\/final)] :: Anaconda, Inc. on darwin\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n>>> from datasets import load_dataset\r\ndataset = load_dataset(\r\n 'dutch_social')\r\n>>> dataset = load_dataset(\r\n... 
'dutch_social')\r\nTraceback (most recent call last):\r\n File \"\/Users\/koenvandenberge\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 267, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/Users\/koenvandenberge\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/Users\/koenvandenberge\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 486, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/dutch_social\/dutch_social.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\/Users\/koenvandenberge\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 278, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/Users\/koenvandenberge\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/Users\/koenvandenberge\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 486, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/dutch_social\/dutch_social.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 2, in \r\n File \"\/Users\/koenvandenberge\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 589, in load_dataset\r\n path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n File \"\/Users\/koenvandenberge\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 282, in prepare_module\r\n combined_path, github_file_path, file_path\r\nFileNotFoundError: Couldn't find file locally at dutch_social\/dutch_social.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/dutch_social\/dutch_social.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/dutch_social\/dutch_social.py\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1674\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1674\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1673","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1673\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1673\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1673\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1673","id":777263651,"node_id":"MDU6SXNzdWU3NzcyNjM2NTE=","number":1673,"title":"Unable to Download Hindi Wikipedia 
Dataset","user":{"login":"aditya3498","id":30871963,"node_id":"MDQ6VXNlcjMwODcxOTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30871963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aditya3498","html_url":"https:\/\/github.com\/aditya3498","followers_url":"https:\/\/api.github.com\/users\/aditya3498\/followers","following_url":"https:\/\/api.github.com\/users\/aditya3498\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aditya3498\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aditya3498\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aditya3498\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aditya3498\/orgs","repos_url":"https:\/\/api.github.com\/users\/aditya3498\/repos","events_url":"https:\/\/api.github.com\/users\/aditya3498\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aditya3498\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-01-01T10:52:53Z","updated_at":"2021-01-05T10:22:12Z","closed_at":"2021-01-05T10:22:12Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I used the Dataset Library in Python to load the wikipedia dataset with the Hindi Config 20200501.hi along with something called beam_runner='DirectRunner' and it keeps giving me the error that the file is not found. I have attached the screenshot of the error and the code both. Please help me to understand how to resolve this issue.\r\n\r\n![Code](https:\/\/user-images.githubusercontent.com\/30871963\/103437466-1f3a3300-4c4e-11eb-9d54-fc9601abfeec.png)\r\n\r\n![Error](https:\/\/user-images.githubusercontent.com\/30871963\/103437407-7ee40e80-4c4d-11eb-8151-a86eb664e6be.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1673\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1673\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1672","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1672\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1672\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1672\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1672","id":777258941,"node_id":"MDU6SXNzdWU3NzcyNTg5NDE=","number":1672,"title":"load_dataset hang on 
file_lock","user":{"login":"tomacai","id":69860107,"node_id":"MDQ6VXNlcjY5ODYwMTA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69860107?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tomacai","html_url":"https:\/\/github.com\/tomacai","followers_url":"https:\/\/api.github.com\/users\/tomacai\/followers","following_url":"https:\/\/api.github.com\/users\/tomacai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tomacai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tomacai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tomacai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tomacai\/orgs","repos_url":"https:\/\/api.github.com\/users\/tomacai\/repos","events_url":"https:\/\/api.github.com\/users\/tomacai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tomacai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-01-01T10:25:07Z","updated_at":"2021-03-31T16:24:13Z","closed_at":"2021-01-01T11:47:36Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to load the squad dataset. Fails on Windows 10 but succeeds in Colab.\r\nTransformers: 3.3.1\r\nDatasets: 1.0.2\r\nWindows 10 (also tested in WSL)\r\n\r\n```\r\ndatasets.logging.set_verbosity_debug()\r\ndatasets.\r\ntrain_dataset = load_dataset('squad', split='train')\r\nvalid_dataset = load_dataset('squad', split='validation')\r\n\r\ntrain_dataset.features\r\n```\r\n\r\n```\r\nhttps:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.2\/datasets\/squad\/squad.py not found in cache or force_download set to True, downloading to C:\\Users\\simpl\\.cache\\huggingface\\datasets\\tmpzj_o_6u7\r\nDownloading:\r\n5.24k\/? 
[00:00<00:00, 134kB\/s]\r\nstoring https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.2\/datasets\/squad\/squad.py in cache at C:\\Users\\simpl\\.cache\\huggingface\\datasets\\f6877c8d2e01e8fcb60dc101be28b54a7522feac756deb9ac5c39c6d8ebef1ce.85f43de978b9b25921cb78d7a2f2b350c04acdbaedb9ecb5f7101cd7c0950e68.py\r\ncreating metadata file for C:\\Users\\simpl\\.cache\\huggingface\\datasets\\f6877c8d2e01e8fcb60dc101be28b54a7522feac756deb9ac5c39c6d8ebef1ce.85f43de978b9b25921cb78d7a2f2b350c04acdbaedb9ecb5f7101cd7c0950e68.py\r\n\r\nChecking C:\\Users\\simpl\\.cache\\huggingface\\datasets\\f6877c8d2e01e8fcb60dc101be28b54a7522feac756deb9ac5c39c6d8ebef1ce.85f43de978b9b25921cb78d7a2f2b350c04acdbaedb9ecb5f7101cd7c0950e68.py for additional imports.\r\nFound main folder for dataset https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.2\/datasets\/squad\/squad.py at C:\\Users\\simpl\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\squad\r\nFound specific version folder for dataset https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.2\/datasets\/squad\/squad.py at C:\\Users\\simpl\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\squad\\1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41\r\nFound script file from https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.2\/datasets\/squad\/squad.py to C:\\Users\\simpl\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\squad\\1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41\\squad.py\r\nCouldn't find dataset infos file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.2\/datasets\/squad\\dataset_infos.json\r\nFound metadata file for dataset https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.2\/datasets\/squad\/squad.py at C:\\Users\\simpl\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\squad\\1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41\\squad.json\r\nNo config specified, defaulting to first: squad\/plain_text\r\n```\r\n\r\nInterrupting the jupyter kernel we are in a file lock.\r\n\r\nIn Google Colab the download is ok. In contrast to a local run in colab dataset_infos.json is downloaded\r\n```\r\nhttps:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.2\/datasets\/squad\/dataset_infos.json not found in cache or force_download set to True, downloading to \/root\/.cache\/huggingface\/datasets\/tmptl9ha_ad\r\n\r\nDownloading:\r\n2.19k\/? 
[00:00<00:00, 26.2kB\/s]\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1672\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1672\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1671","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1671\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1671\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1671\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1671","id":776652193,"node_id":"MDU6SXNzdWU3NzY2NTIxOTM=","number":1671,"title":"connection issue ","user":{"login":"rabeehkarimimahabadi","id":73364383,"node_id":"MDQ6VXNlcjczMzY0Mzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73364383?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi","html_url":"https:\/\/github.com\/rabeehkarimimahabadi","followers_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-30T21:56:20Z","updated_at":"2021-01-04T09:59:51Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am getting this connection issue, resulting in large failure on cloud, @lhoestq I appreciate your help on this.\r\n\r\nIf I want to keep the codes the same, so not using save_to_disk, load_from_disk, but save the datastes in the way load_dataset reads from and copy the files in the same folder the datasets library reads from, could you assist me how this can be done, thanks\r\n\r\nI tried to do read the data, save it to a path and then set HF_HOME, which does not work and this is still not reading from the old set path, could you assist me how to save the datasets in a path, and let dataset library read from this path to avoid connection issue. 
thanks\r\n\r\n```\r\nimdb = datasets.load_dataset(\"imdb\")\r\nimdb.save_to_disk(\"\/idiap\/temp\/rkarimi\/hf_datasets\/imdb\")\r\n>>> os.environ[\"HF_HOME\"]=\"\/idiap\/temp\/rkarimi\/hf_datasets\/\"\r\n>>> imdb = datasets.load_dataset(\"imdb\")\r\nReusing dataset imdb (\/idiap\/temp\/rkarimi\/cache_home_2\/datasets\/imdb\/plain_text\/1.0.0\/90099cb476936b753383ba2ae6ab2eae419b2e87f71cd5189cb9c8e5814d12a3)\r\n```\r\n\r\nI tried afterwards to set HF_HOME in bash, this makes it read from it, but it cannot let dataset library load from the saved path and still downloading data. could you tell me how to fix this issue @lhoestq thanks \r\n\r\nAlso this is on cloud, so I save them in a path, copy it to \"another machine\" to load the data\r\n\r\n### Error stack\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \".\/finetune_t5_trainer.py\", line 344, in \r\n main()\r\n File \".\/finetune_t5_trainer.py\", line 232, in main\r\n for task in data_args.eval_tasks} if training_args.do_test else None\r\n File \".\/finetune_t5_trainer.py\", line 232, in \r\n for task in data_args.eval_tasks} if training_args.do_test else None\r\n File \"\/workdir\/seq2seq\/data\/tasks.py\", line 136, in get_dataset\r\n split = self.get_sampled_split(split, n_obs)\r\n File \"\/workdir\/seq2seq\/data\/tasks.py\", line 64, in get_sampled_split\r\n dataset = self.load_dataset(split)\r\n File \"\/workdir\/seq2seq\/data\/tasks.py\", line 454, in load_dataset\r\n split=split, script_version=\"master\")\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py\", line 589, in load_dataset\r\n path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py\", line 263, in prepare_module\r\n head_hf_s3(path, filename=name, dataset=dataset)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py\", line 200, in head_hf_s3\r\n return http_head(hf_bucket_url(identifier=identifier, filename=filename, use_cdn=use_cdn, dataset=dataset))\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py\", line 403, in http_head\r\n url, proxies=proxies, headers=headers, cookies=cookies, allow_redirects=allow_redirects, timeout=timeout\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/requests\/api.py\", line 104, in head\r\n return request('head', url, **kwargs)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/requests\/api.py\", line 61, in request\r\n return session.request(method=method, url=url, **kwargs)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/requests\/sessions.py\", line 542, in request\r\n resp = self.send(prep, **send_kwargs)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/requests\/sessions.py\", line 655, in send\r\n r = adapter.send(request, **kwargs)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/requests\/adapters.py\", line 504, in send\r\n raise ConnectTimeout(e, request=request)\r\nrequests.exceptions.ConnectTimeout: HTTPSConnectionPool(host='s3.amazonaws.com', port=443): Max retries exceeded with url: \/datasets.huggingface.co\/datasets\/datasets\/glue\/glue.py (Caused by ConnectTimeoutError(, 'Connection to s3.amazonaws.com timed out. 
(connect timeout=10)'))\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1671\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1671\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1670","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1670\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1670\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1670\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1670","id":776608579,"node_id":"MDU6SXNzdWU3NzY2MDg1Nzk=","number":1670,"title":"wiki_dpr pre-processing performance","user":{"login":"dbarnhart","id":753898,"node_id":"MDQ6VXNlcjc1Mzg5OA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/753898?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dbarnhart","html_url":"https:\/\/github.com\/dbarnhart","followers_url":"https:\/\/api.github.com\/users\/dbarnhart\/followers","following_url":"https:\/\/api.github.com\/users\/dbarnhart\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dbarnhart\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dbarnhart\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dbarnhart\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dbarnhart\/orgs","repos_url":"https:\/\/api.github.com\/users\/dbarnhart\/repos","events_url":"https:\/\/api.github.com\/users\/dbarnhart\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dbarnhart\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067401494,"node_id":"MDU6TGFiZWwyMDY3NDAxNDk0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Dataset%20discussion","name":"Dataset discussion","color":"72f99f","default":false,"description":"Discussions on the datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-30T19:41:43Z","updated_at":"2021-01-28T09:41:36Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I've been working with wiki_dpr and noticed that the dataset processing is seriously impaired in performance [1]. It takes about 12h to process the entire dataset. Most of this time is simply loading and processing the data, but the actual indexing is also quite slow (3h).\r\n\r\nI won't repeat the concerns around multiprocessing as they are addressed in other issues (#786), but this is the first obvious thing to do. Using cython to speed up the text manipulation may be also help. Loading and processing a dataset of this size in under 15 minutes does not seem unreasonable on a modern multi-core machine. I have hit such targets myself on similar tasks. Would love to see this improve.\r\n\r\nThe other issue is that it takes 3h to construct the FAISS index. If only we could use GPUs with HNSW, but we can't. 
My sharded GPU indexing code can build an IVF + PQ index in 10 minutes on 20 million vectors. Still, 3h seems slow even for the CPU.\r\n\r\nIt looks like HF is adding only 1000 vectors at a time by default [2], whereas the faiss benchmarks adds 1 million vectors at a time (effectively) [3]. It's possible the runtime could be reduced with a larger batch. Also, it looks like project dependencies ultimately use OpenBLAS, but this is known to have issues when combined with OpenMP, which HNSW does [3]. A workaround is to set the environment variable `OMP_WAIT_POLICY=PASSIVE` via `os.environ` or similar.\r\n\r\nReferences:\r\n[1] https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/wiki_dpr\/wiki_dpr.py\r\n[2] https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/search.py\r\n[3] https:\/\/github.com\/facebookresearch\/faiss\/blob\/master\/benchs\/bench_hnsw.py\r\n[4] https:\/\/github.com\/facebookresearch\/faiss\/issues\/422","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1670\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1670\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1669","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1669\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1669\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1669\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1669","id":776608386,"node_id":"MDU6SXNzdWU3NzY2MDgzODY=","number":1669,"title":"wiki_dpr dataset pre-processesing performance","user":{"login":"dbarnhart","id":753898,"node_id":"MDQ6VXNlcjc1Mzg5OA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/753898?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dbarnhart","html_url":"https:\/\/github.com\/dbarnhart","followers_url":"https:\/\/api.github.com\/users\/dbarnhart\/followers","following_url":"https:\/\/api.github.com\/users\/dbarnhart\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dbarnhart\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dbarnhart\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dbarnhart\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dbarnhart\/orgs","repos_url":"https:\/\/api.github.com\/users\/dbarnhart\/repos","events_url":"https:\/\/api.github.com\/users\/dbarnhart\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dbarnhart\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-30T19:41:09Z","updated_at":"2020-12-30T19:42:25Z","closed_at":"2020-12-30T19:42:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I've been working with wiki_dpr and noticed that the dataset processing is seriously impaired in performance [1]. It takes about 12h to process the entire dataset. 
Most of this time is simply loading and processing the data, but the actual indexing is also quite slow (3h).\r\n\r\nI won't repeat the concerns around multiprocessing as they are addressed in other issues (#786), but this is the first obvious thing to do. Using cython to speed up the text manipulation may be also help. Loading and processing a dataset of this size in under 15 minutes does not seem unreasonable on a modern multi-core machine. I have hit such targets myself on similar tasks. Would love to see this improve.\r\n\r\nThe other issue is that it takes 3h to construct the FAISS index. If only we could use GPUs with HNSW, but we can't. My sharded GPU indexing code can build an IVF + PQ index in 10 minutes on 20 million vectors. Still, 3h seems slow even for the CPU.\r\n\r\nIt looks like HF is adding only 1000 vectors at a time by default [2], whereas the faiss benchmarks adds 1 million vectors at a time (effectively) [3]. It's possible the runtime could be reduced with a larger batch. Also, it looks like project dependencies ultimately use OpenBLAS, but this is known to have issues when combined with OpenMP, which HNSW does [3]. A workaround is to set the environment variable `OMP_WAIT_POLICY=PASSIVE` via `os.environ` or similar.\r\n\r\nReferences:\r\n[1] https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/wiki_dpr\/wiki_dpr.py\r\n[2] https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/search.py\r\n[3] https:\/\/github.com\/facebookresearch\/faiss\/blob\/master\/benchs\/bench_hnsw.py\r\n[4] https:\/\/github.com\/facebookresearch\/faiss\/issues\/422","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1669\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1669\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1668","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1668\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1668\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1668\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1668","id":776552854,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ3MDIxODI0","number":1668,"title":"xed_en_fi dataset 
Cleanup","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-30T17:11:18Z","updated_at":"2020-12-30T17:22:44Z","closed_at":"2020-12-30T17:22:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1668","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1668","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1668.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1668.patch","merged_at":"2020-12-30T17:22:43Z"},"body":"Fix ClassLabel feature type and minor mistakes in the dataset card","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1668\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1668\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1667","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1667\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1667\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1667\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1667","id":776446658,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2OTM4MjAy","number":1667,"title":"Fix NER metric example in Overview 
notebook","user":{"login":"jungwhank","id":53588015,"node_id":"MDQ6VXNlcjUzNTg4MDE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53588015?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jungwhank","html_url":"https:\/\/github.com\/jungwhank","followers_url":"https:\/\/api.github.com\/users\/jungwhank\/followers","following_url":"https:\/\/api.github.com\/users\/jungwhank\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jungwhank\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jungwhank\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jungwhank\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jungwhank\/orgs","repos_url":"https:\/\/api.github.com\/users\/jungwhank\/repos","events_url":"https:\/\/api.github.com\/users\/jungwhank\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jungwhank\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-30T13:05:19Z","updated_at":"2020-12-31T01:12:08Z","closed_at":"2020-12-30T17:21:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1667","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1667","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1667.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1667.patch","merged_at":"2020-12-30T17:21:51Z"},"body":"Fix errors in `NER metric example` section in `Overview.ipynb`.\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nImportError Traceback (most recent call last)\r\n in ()\r\n----> 1 ner_metric = load_metric('seqeval')\r\n 2 references = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]\r\n 3 predictions = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]\r\n 4 ner_metric.compute(predictions, references)\r\n\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, **download_kwargs)\r\n 340 if needs_to_be_installed:\r\n 341 raise ImportError(\r\n--> 342 f\"To be able to use this {module_type}, you need to install the following dependencies\"\r\n 343 f\"{[lib_name for lib_name, lib_path in needs_to_be_installed]} using 'pip install \"\r\n 344 f\"{' '.join([lib_path for lib_name, lib_path in needs_to_be_installed])}' for instance'\"\r\n\r\nImportError: To be able to use this metric, you need to install the following dependencies['seqeval'] using 'pip install seqeval' for instance'\r\n```\r\n\r\n```\r\nValueError Traceback (most recent call last)\r\n in ()\r\n 2 references = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]\r\n 3 predictions = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]\r\n----> 4 ner_metric.compute(predictions, references)\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/metric.py in compute(self, *args, **kwargs)\r\n 378 \"\"\"\r\n 379 if args:\r\n--> 380 raise ValueError(\"Please call `compute` using keyword arguments.\")\r\n 381 \r\n 382 predictions = kwargs.pop(\"predictions\", None)\r\n\r\nValueError: Please call `compute` using keyword 
arguments.\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1667\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1667\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1666","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1666\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1666\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1666\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1666","id":776432006,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2OTI2MzQw","number":1666,"title":"Add language to dataset card for Makhzan dataset.","user":{"login":"arkhalid","id":14899066,"node_id":"MDQ6VXNlcjE0ODk5MDY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14899066?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arkhalid","html_url":"https:\/\/github.com\/arkhalid","followers_url":"https:\/\/api.github.com\/users\/arkhalid\/followers","following_url":"https:\/\/api.github.com\/users\/arkhalid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arkhalid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arkhalid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arkhalid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arkhalid\/orgs","repos_url":"https:\/\/api.github.com\/users\/arkhalid\/repos","events_url":"https:\/\/api.github.com\/users\/arkhalid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arkhalid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-30T12:25:52Z","updated_at":"2020-12-30T17:20:35Z","closed_at":"2020-12-30T17:20:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1666","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1666","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1666.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1666.patch","merged_at":"2020-12-30T17:20:35Z"},"body":"Add language to dataset card.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1666\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1666\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1665","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1665\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1665\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1665\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1665","id":776431087,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2OTI1NTgw","number":1665,"title":"Add language to dataset card for Counter dataset.","user":{"login":"arkhalid","id":14899066,"node_id":"MDQ6VXNlcjE0ODk5MDY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14899066?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arkhalid","html_url":"https:\/\/github.com\/arkhalid","followers_url":"https:\/\/api.github.com\/users\/arkhalid\/followers","following_url":"https:\/\/api.github.com\/users\/arkhalid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arkhalid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arkhalid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arkhalid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arkhalid\/orgs","repos_url":"https:\/\/api.github.com\/users\/arkhalid\/repos","events_url":"https:\/\/api.github.com\/users\/arkhalid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arkhalid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-30T12:23:20Z","updated_at":"2020-12-30T17:20:20Z","closed_at":"2020-12-30T17:20:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1665","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1665","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1665.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1665.patch","merged_at":"2020-12-30T17:20:20Z"},"body":"Add language.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1665\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1665\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1664","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1664\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1664\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1664\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1664","id":775956441,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2NTM1NDcy","number":1664,"title":"removed \\n in 
labels","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-29T15:41:43Z","updated_at":"2020-12-30T17:18:49Z","closed_at":"2020-12-30T17:18:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1664","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1664","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1664.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1664.patch","merged_at":"2020-12-30T17:18:49Z"},"body":"updated social_i_qa labels as per #1633 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1664\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1664\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1663","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1663\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1663\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1663\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1663","id":775914320,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2NTAzMjg5","number":1663,"title":"update saving and loading methods for faiss index so to accept path 
l\u2026","user":{"login":"tslott","id":11614798,"node_id":"MDQ6VXNlcjExNjE0Nzk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11614798?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tslott","html_url":"https:\/\/github.com\/tslott","followers_url":"https:\/\/api.github.com\/users\/tslott\/followers","following_url":"https:\/\/api.github.com\/users\/tslott\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tslott\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tslott\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tslott\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tslott\/orgs","repos_url":"https:\/\/api.github.com\/users\/tslott\/repos","events_url":"https:\/\/api.github.com\/users\/tslott\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tslott\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-29T14:15:37Z","updated_at":"2021-01-18T09:27:23Z","closed_at":"2021-01-18T09:27:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1663","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1663","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1663.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1663.patch","merged_at":"2021-01-18T09:27:23Z"},"body":"- Update saving and loading methods for faiss index so to accept path like objects from pathlib\r\n\r\nThe current code only supports using a string type to save and load a faiss index. This change makes it possible to use a string type OR a Path from [pathlib](https:\/\/docs.python.org\/3\/library\/pathlib.html). 
The codes becomes a more intuitive this way I think.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1663\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1663\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1662","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1662\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1662\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1662\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1662","id":775890154,"node_id":"MDU6SXNzdWU3NzU4OTAxNTQ=","number":1662,"title":"Arrow file is too large when saving vector data","user":{"login":"weiwangthu","id":22360336,"node_id":"MDQ6VXNlcjIyMzYwMzM2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22360336?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/weiwangthu","html_url":"https:\/\/github.com\/weiwangthu","followers_url":"https:\/\/api.github.com\/users\/weiwangthu\/followers","following_url":"https:\/\/api.github.com\/users\/weiwangthu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/weiwangthu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/weiwangthu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/weiwangthu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/weiwangthu\/orgs","repos_url":"https:\/\/api.github.com\/users\/weiwangthu\/repos","events_url":"https:\/\/api.github.com\/users\/weiwangthu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/weiwangthu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-29T13:23:12Z","updated_at":"2021-01-21T14:12:39Z","closed_at":"2021-01-21T14:12:39Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I computed the sentence embedding of each sentence of bookcorpus data using bert base and saved them to disk. I used 20M sentences and the obtained arrow file is about 59GB while the original text file is only about 1.3GB. 
Are there any ways to reduce the size of the arrow file?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1662\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1662\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1661","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1661\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1661\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1661\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1661","id":775840801,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2NDQzNjYx","number":1661,"title":"updated dataset cards","user":{"login":"Nilanshrajput","id":28673745,"node_id":"MDQ6VXNlcjI4NjczNzQ1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28673745?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Nilanshrajput","html_url":"https:\/\/github.com\/Nilanshrajput","followers_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/followers","following_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/orgs","repos_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/repos","events_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-29T11:20:40Z","updated_at":"2020-12-30T17:15:16Z","closed_at":"2020-12-30T17:15:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1661","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1661","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1661.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1661.patch","merged_at":"2020-12-30T17:15:16Z"},"body":"added dataset instance in the card.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1661\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1661\/timeline","performed_via_github_app":null} 
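To illustrate the path-handling change proposed in PR #1663 above: after that change, `Dataset.save_faiss_index` and `Dataset.load_faiss_index` should accept either a plain string or a `pathlib.Path`. The sketch below is illustrative only; the toy dataset, the `embeddings` column, and the index file name are made up, and `faiss` must be installed for the index methods to work:

```python
from pathlib import Path

from datasets import Dataset

# Toy dataset with a tiny embeddings column, purely for illustration.
ds = Dataset.from_dict(
    {"text": ["first passage", "second passage"], "embeddings": [[0.0, 1.0], [1.0, 0.0]]}
)
ds.add_faiss_index(column="embeddings")

# Both a str and a pathlib.Path should work here once #1663 is merged.
index_path = Path("toy_index.faiss")
ds.save_faiss_index("embeddings", index_path)

ds.drop_index("embeddings")
ds.load_faiss_index("embeddings", index_path)
```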
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1660","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1660\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1660\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1660\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1660","id":775831423,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2NDM2MDg1","number":1660,"title":"add dataset info","user":{"login":"harshalmittal4","id":24206326,"node_id":"MDQ6VXNlcjI0MjA2MzI2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24206326?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/harshalmittal4","html_url":"https:\/\/github.com\/harshalmittal4","followers_url":"https:\/\/api.github.com\/users\/harshalmittal4\/followers","following_url":"https:\/\/api.github.com\/users\/harshalmittal4\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/harshalmittal4\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/harshalmittal4\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/harshalmittal4\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/harshalmittal4\/orgs","repos_url":"https:\/\/api.github.com\/users\/harshalmittal4\/repos","events_url":"https:\/\/api.github.com\/users\/harshalmittal4\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/harshalmittal4\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-29T10:58:19Z","updated_at":"2020-12-30T17:04:30Z","closed_at":"2020-12-30T17:04:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1660","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1660","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1660.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1660.patch","merged_at":"2020-12-30T17:04:30Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1660\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1660\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1659","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1659\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1659\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1659\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1659","id":775831288,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2NDM1OTcy","number":1659,"title":"update dataset 
info","user":{"login":"harshalmittal4","id":24206326,"node_id":"MDQ6VXNlcjI0MjA2MzI2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24206326?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/harshalmittal4","html_url":"https:\/\/github.com\/harshalmittal4","followers_url":"https:\/\/api.github.com\/users\/harshalmittal4\/followers","following_url":"https:\/\/api.github.com\/users\/harshalmittal4\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/harshalmittal4\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/harshalmittal4\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/harshalmittal4\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/harshalmittal4\/orgs","repos_url":"https:\/\/api.github.com\/users\/harshalmittal4\/repos","events_url":"https:\/\/api.github.com\/users\/harshalmittal4\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/harshalmittal4\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-29T10:58:01Z","updated_at":"2020-12-30T16:55:07Z","closed_at":"2020-12-30T16:55:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1659","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1659","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1659.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1659.patch","merged_at":"2020-12-30T16:55:07Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1659\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1659\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1658","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1658\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1658\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1658\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1658","id":775651085,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2Mjg4Njg4","number":1658,"title":"brwac dataset: add instances and data splits 
info","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-29T01:24:45Z","updated_at":"2020-12-30T16:54:26Z","closed_at":"2020-12-30T16:54:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1658","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1658","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1658.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1658.patch","merged_at":"2020-12-30T16:54:26Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1658\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1658\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1657","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1657\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1657\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1657\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1657","id":775647000,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2Mjg1NjU2","number":1657,"title":"mac_morpho dataset: add data splits 
info","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-29T01:05:21Z","updated_at":"2020-12-30T16:51:24Z","closed_at":"2020-12-30T16:51:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1657","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1657","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1657.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1657.patch","merged_at":"2020-12-30T16:51:24Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1657\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1657\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1656","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1656\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1656\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1656\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1656","id":775645356,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2Mjg0NDI3","number":1656,"title":"assin 2 dataset: add instances and data splits 
info","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-29T00:57:51Z","updated_at":"2020-12-30T16:50:56Z","closed_at":"2020-12-30T16:50:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1656","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1656","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1656.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1656.patch","merged_at":"2020-12-30T16:50:56Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1656\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1656\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1655","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1655\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1655\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1655\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1655","id":775643418,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2MjgyOTM4","number":1655,"title":"assin dataset: add instances and data splits 
info","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-29T00:47:56Z","updated_at":"2020-12-30T16:50:23Z","closed_at":"2020-12-30T16:50:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1655","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1655","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1655.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1655.patch","merged_at":"2020-12-30T16:50:22Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1655\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1655\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1654","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1654\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1654\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1654\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1654","id":775640729,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2MjgwODIy","number":1654,"title":"lener_br dataset: add instances and data splits 
info","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-29T00:35:12Z","updated_at":"2020-12-30T16:49:32Z","closed_at":"2020-12-30T16:49:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1654","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1654","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1654.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1654.patch","merged_at":"2020-12-30T16:49:32Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1654\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1654\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1653","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1653\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1653\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1653\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1653","id":775632945,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2Mjc0Njc0","number":1653,"title":"harem dataset: add data splits 
info","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-28T23:58:20Z","updated_at":"2020-12-30T16:49:03Z","closed_at":"2020-12-30T16:49:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1653","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1653","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1653.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1653.patch","merged_at":"2020-12-30T16:49:03Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1653\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1653\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1652","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1652\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1652\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1652\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1652","id":775571813,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2MjI1NTM1","number":1652,"title":"Update dataset cards from previous 
sprint","user":{"login":"j-chim","id":22435209,"node_id":"MDQ6VXNlcjIyNDM1MjA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22435209?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/j-chim","html_url":"https:\/\/github.com\/j-chim","followers_url":"https:\/\/api.github.com\/users\/j-chim\/followers","following_url":"https:\/\/api.github.com\/users\/j-chim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/j-chim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/j-chim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/j-chim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/j-chim\/orgs","repos_url":"https:\/\/api.github.com\/users\/j-chim\/repos","events_url":"https:\/\/api.github.com\/users\/j-chim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/j-chim\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-28T20:20:47Z","updated_at":"2020-12-30T16:48:04Z","closed_at":"2020-12-30T16:48:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1652","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1652","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1652.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1652.patch","merged_at":"2020-12-30T16:48:04Z"},"body":"This PR updates the dataset cards\/readmes for the 4 approved PRs I submitted in the previous sprint.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1652\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1652\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1651","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1651\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1651\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1651\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1651","id":775554319,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2MjExMjQw","number":1651,"title":"Add twi 
wordsim353","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-28T19:31:55Z","updated_at":"2021-01-04T09:39:39Z","closed_at":"2021-01-04T09:39:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1651","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1651","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1651.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1651.patch","merged_at":"2021-01-04T09:39:38Z"},"body":"Added the citation information to the README file","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1651\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1651\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1650","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1650\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1650\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1650\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1650","id":775545912,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2MjA0MzYy","number":1650,"title":"Update 
README.md","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-28T19:09:05Z","updated_at":"2020-12-29T10:43:14Z","closed_at":"2020-12-29T10:43:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1650","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1650","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1650.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1650.patch","merged_at":"2020-12-29T10:43:14Z"},"body":"added dataset summary","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1650\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1650\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1649","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1649\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1649\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1649\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1649","id":775544487,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2MjAzMjE1","number":1649,"title":"Update 
README.md","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-28T19:05:00Z","updated_at":"2020-12-29T10:50:58Z","closed_at":"2020-12-29T10:43:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1649","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1649","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1649.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1649.patch","merged_at":"2020-12-29T10:43:03Z"},"body":"Added information in the dataset card","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1649\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1649\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1648","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1648\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1648\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1648\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1648","id":775542360,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2MjAxNTQ0","number":1648,"title":"Update 
README.md","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-28T18:59:06Z","updated_at":"2020-12-29T10:39:14Z","closed_at":"2020-12-29T10:39:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1648","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1648","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1648.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1648.patch","merged_at":"2020-12-29T10:39:14Z"},"body":"added dataset summary","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1648\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1648\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1647","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1647\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1647\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1647\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1647","id":775525799,"node_id":"MDU6SXNzdWU3NzU1MjU3OTk=","number":1647,"title":"NarrativeQA fails to load with 
`load_dataset`","user":{"login":"eric-mitchell","id":56408839,"node_id":"MDQ6VXNlcjU2NDA4ODM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56408839?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eric-mitchell","html_url":"https:\/\/github.com\/eric-mitchell","followers_url":"https:\/\/api.github.com\/users\/eric-mitchell\/followers","following_url":"https:\/\/api.github.com\/users\/eric-mitchell\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eric-mitchell\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eric-mitchell\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eric-mitchell\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eric-mitchell\/orgs","repos_url":"https:\/\/api.github.com\/users\/eric-mitchell\/repos","events_url":"https:\/\/api.github.com\/users\/eric-mitchell\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eric-mitchell\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-28T18:16:09Z","updated_at":"2021-01-05T12:05:08Z","closed_at":"2021-01-03T17:58:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When loading the NarrativeQA dataset with `load_dataset('narrativeqa')` as given in the documentation [here](https:\/\/huggingface.co\/datasets\/narrativeqa), I receive a cascade of exceptions, ending with\r\n\r\n FileNotFoundError: Couldn't find file locally at narrativeqa\/narrativeqa.py, or remotely at \r\n https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/narrativeqa\/narrativeqa.py or \r\n https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/narrativeqa\/narrativeqa.py\r\n\r\nWorkaround: manually copy the `narrativeqa.py` builder into my local directory with \r\n\r\n curl https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/narrativeqa\/narrativeqa.py -o narrativeqa.py\r\n\r\nand load the dataset as `load_dataset('narrativeqa.py')` everything works fine. 
I'm on datasets v1.1.3 using Python 3.6.10.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1647\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1647\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1646","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1646\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1646\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1646\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1646","id":775499344,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2MTY4MTk3","number":1646,"title":"Add missing homepage in some dataset cards","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-28T17:09:48Z","updated_at":"2021-01-04T14:08:57Z","closed_at":"2021-01-04T14:08:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1646","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1646","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1646.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1646.patch","merged_at":"2021-01-04T14:08:56Z"},"body":"In some dataset cards the homepage field in the `Dataset Description` section was missing\/empty","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1646\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1646\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1645","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1645\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1645\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1645\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1645","id":775473106,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ2MTQ4OTUx","number":1645,"title":"Rename \"part-of-speech-tagging\" tag in some dataset cards","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-28T16:09:09Z","updated_at":"2021-01-07T10:08:14Z","closed_at":"2021-01-07T10:08:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1645","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1645","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1645.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1645.patch","merged_at":"2021-01-07T10:08:13Z"},"body":"`part-of-speech-tagging` was not part of the tagging taxonomy under `structure-prediction`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1645\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1645\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1644","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1644\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1644\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1644\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1644","id":775375880,"node_id":"MDU6SXNzdWU3NzUzNzU4ODA=","number":1644,"title":"HoVeR dataset fails to 
load","user":{"login":"urikz","id":1473778,"node_id":"MDQ6VXNlcjE0NzM3Nzg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1473778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/urikz","html_url":"https:\/\/github.com\/urikz","followers_url":"https:\/\/api.github.com\/users\/urikz\/followers","following_url":"https:\/\/api.github.com\/users\/urikz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/urikz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/urikz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/urikz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/urikz\/orgs","repos_url":"https:\/\/api.github.com\/users\/urikz\/repos","events_url":"https:\/\/api.github.com\/users\/urikz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/urikz\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-28T12:27:07Z","updated_at":"2021-01-04T18:46:31Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi! I'm getting an error when trying to load **HoVeR** dataset. Another one (**SQuAD**) does work for me. I'm using the latest (1.1.3) version of the library.\r\n\r\nSteps to reproduce the error:\r\n\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset(\"hover\")\r\nTraceback (most recent call last):\r\n File \"\/Users\/urikz\/anaconda\/envs\/mentionmemory\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 267, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/Users\/urikz\/anaconda\/envs\/mentionmemory\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/Users\/urikz\/anaconda\/envs\/mentionmemory\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 486, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/hover\/hover.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\/Users\/urikz\/anaconda\/envs\/mentionmemory\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 278, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/Users\/urikz\/anaconda\/envs\/mentionmemory\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/Users\/urikz\/anaconda\/envs\/mentionmemory\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 486, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/hover\/hover.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/Users\/urikz\/anaconda\/envs\/mentionmemory\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 589, in load_dataset\r\n path, script_version=script_version, download_config=download_config, 
download_mode=download_mode, dataset=True\r\n File \"\/Users\/urikz\/anaconda\/envs\/mentionmemory\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 282, in prepare_module\r\n combined_path, github_file_path, file_path\r\nFileNotFoundError: Couldn't find file locally at hover\/hover.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/hover\/hover.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/hover\/hover.py\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1644\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1644\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1643","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1643\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1643\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1643\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1643","id":775280046,"node_id":"MDU6SXNzdWU3NzUyODAwNDY=","number":1643,"title":"Dataset social_bias_frames 404","user":{"login":"atemate","id":7501517,"node_id":"MDQ6VXNlcjc1MDE1MTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7501517?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/atemate","html_url":"https:\/\/github.com\/atemate","followers_url":"https:\/\/api.github.com\/users\/atemate\/followers","following_url":"https:\/\/api.github.com\/users\/atemate\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/atemate\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/atemate\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/atemate\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/atemate\/orgs","repos_url":"https:\/\/api.github.com\/users\/atemate\/repos","events_url":"https:\/\/api.github.com\/users\/atemate\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/atemate\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-28T08:35:34Z","updated_at":"2020-12-28T08:38:07Z","closed_at":"2020-12-28T08:38:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset(\"social_bias_frames\")\r\n...\r\nDownloading and preparing dataset social_bias_frames\/default\r\n...\r\n~\/.pyenv\/versions\/3.7.6\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag)\r\n 484 )\r\n 485 elif response is not None and response.status_code == 404:\r\n--> 486 raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\n 487 raise ConnectionError(\"Couldn't reach {}\".format(url))\r\n 488 \r\n\r\nFileNotFoundError: Couldn't find file at 
https:\/\/homes.cs.washington.edu\/~msap\/social-bias-frames\/SocialBiasFrames_v2.tgz\r\n```\r\n[Here](https:\/\/homes.cs.washington.edu\/~msap\/social-bias-frames\/) we find button `Download data` with the correct URL for the data: https:\/\/homes.cs.washington.edu\/~msap\/social-bias-frames\/SBIC.v2.tgz","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1643\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1643\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1642","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1642\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1642\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1642\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1642","id":775159568,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ1ODk1MzY1","number":1642,"title":"Ollie dataset","user":{"login":"ontocord","id":8900094,"node_id":"MDQ6VXNlcjg5MDAwOTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8900094?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ontocord","html_url":"https:\/\/github.com\/ontocord","followers_url":"https:\/\/api.github.com\/users\/ontocord\/followers","following_url":"https:\/\/api.github.com\/users\/ontocord\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ontocord\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ontocord\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ontocord\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ontocord\/orgs","repos_url":"https:\/\/api.github.com\/users\/ontocord\/repos","events_url":"https:\/\/api.github.com\/users\/ontocord\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ontocord\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-28T02:43:37Z","updated_at":"2021-01-04T13:35:25Z","closed_at":"2021-01-04T13:35:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1642","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1642","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1642.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1642.patch","merged_at":"2021-01-04T13:35:24Z"},"body":"This is the dataset used to train the Ollie open information extraction algorithm. It has over 21M sentences. 
See http:\/\/knowitall.github.io\/ollie\/ for more details.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1642\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1642\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1641","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1641\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1641\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1641\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1641","id":775110872,"node_id":"MDU6SXNzdWU3NzUxMTA4NzI=","number":1641,"title":"muchocine dataset cannot be dowloaded","user":{"login":"mrm8488","id":3653789,"node_id":"MDQ6VXNlcjM2NTM3ODk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3653789?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mrm8488","html_url":"https:\/\/github.com\/mrm8488","followers_url":"https:\/\/api.github.com\/users\/mrm8488\/followers","following_url":"https:\/\/api.github.com\/users\/mrm8488\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mrm8488\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mrm8488\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mrm8488\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mrm8488\/orgs","repos_url":"https:\/\/api.github.com\/users\/mrm8488\/repos","events_url":"https:\/\/api.github.com\/users\/mrm8488\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mrm8488\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892913,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEz","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/wontfix","name":"wontfix","color":"ffffff","default":true,"description":"This will not be worked on"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-27T21:26:28Z","updated_at":"2021-08-03T05:07:29Z","closed_at":"2021-08-03T05:07:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```python\r\n---------------------------------------------------------------------------\r\nFileNotFoundError Traceback (most recent call last)\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, **download_kwargs)\r\n 267 try:\r\n--> 268 local_path = cached_path(file_path, download_config=download_config)\r\n 269 except FileNotFoundError:\r\n\r\n7 frames\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.2\/datasets\/muchocine\/muchocine.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nFileNotFoundError Traceback (most recent call last)\r\nFileNotFoundError: Couldn't find 
file at https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/muchocine\/muchocine.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nFileNotFoundError Traceback (most recent call last)\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, **download_kwargs)\r\n 281 raise FileNotFoundError(\r\n 282 \"Couldn't find file locally at {}, or remotely at {} or {}\".format(\r\n--> 283 combined_path, github_file_path, file_path\r\n 284 )\r\n 285 )\r\n\r\nFileNotFoundError: Couldn't find file locally at muchocine\/muchocine.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.2\/datasets\/muchocine\/muchocine.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/muchocine\/muchocine.py\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1641\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1641\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1640","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1640\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1640\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1640\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1640","id":774921836,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ1NzI2NzY4","number":1640,"title":"Fix \"'BertTokenizerFast' object has no attribute 'max_len'\"","user":{"login":"mflis","id":15031715,"node_id":"MDQ6VXNlcjE1MDMxNzE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15031715?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mflis","html_url":"https:\/\/github.com\/mflis","followers_url":"https:\/\/api.github.com\/users\/mflis\/followers","following_url":"https:\/\/api.github.com\/users\/mflis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mflis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mflis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mflis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mflis\/orgs","repos_url":"https:\/\/api.github.com\/users\/mflis\/repos","events_url":"https:\/\/api.github.com\/users\/mflis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mflis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-26T19:25:41Z","updated_at":"2020-12-28T17:26:35Z","closed_at":"2020-12-28T17:26:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1640","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1640","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1640.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1640.patch","merged_at":"2020-12-28T17:26:35Z"},"body":"Tensorflow 2.3.0 gives:\r\n FutureWarning: The `max_len` attribute has been 
deprecated and will be removed in a future version, use `model_max_length` instead.\r\n\r\nTensorflow 2.4.0 gives:\r\nAttributeError 'BertTokenizerFast' object has no attribute 'max_len'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1640\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1640\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1639","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1639\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1639\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1639\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1639","id":774903472,"node_id":"MDU6SXNzdWU3NzQ5MDM0NzI=","number":1639,"title":"bug with sst2 in glue ","user":{"login":"ghost","id":10137,"node_id":"MDQ6VXNlcjEwMTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghost","html_url":"https:\/\/github.com\/ghost","followers_url":"https:\/\/api.github.com\/users\/ghost\/followers","following_url":"https:\/\/api.github.com\/users\/ghost\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghost\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghost\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghost\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghost\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghost\/repos","events_url":"https:\/\/api.github.com\/users\/ghost\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghost\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-26T16:57:23Z","updated_at":"2021-08-27T15:03:23Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am getting very low accuracy on SST2 I investigate this and observe that for this dataset sentences are tokenized, while this is correct for the other datasets in GLUE, please see below.\r\nIs there any alternatives I could get untokenized sentences? I am unfortunately under time pressure to report some results on this dataset. thank you for your help. @lhoestq \r\n \r\n```\r\n>>> a = datasets.load_dataset('glue', 'sst2', split=\"validation\", script_version=\"master\")\r\nReusing dataset glue (\/julia\/datasets\/glue\/sst2\/1.0.0\/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)\r\n>>> a[:10]\r\n{'idx': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], 'label': [1, 0, 1, 1, 0, 1, 0, 0, 1, 0], 'sentence': [\"it 's a charming and often affecting journey . \", 'unflinchingly bleak and desperate ', 'allows us to hope that nolan is poised to embark a major career as a commercial yet inventive filmmaker . ', \"the acting , costumes , music , cinematography and sound are all astounding given the production 's austere locales . \", \"it 's slow -- very , very slow . \", 'although laced with humor and a few fanciful touches , the film is a refreshingly serious look at young women . 
', 'a sometimes tedious film . ', \"or doing last year 's taxes with your ex-wife . \", \"you do n't have to know about music to appreciate the film 's easygoing blend of comedy and romance . \", \"in exactly 89 minutes , most of which passed as slowly as if i 'd been sitting naked on an igloo , formula 51 sank from quirky to jerky to utter turkey . \"]}\r\n\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1639\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1639\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1638","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1638\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1638\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1638\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1638","id":774869184,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ1Njg5ODQ5","number":1638,"title":"Add id_puisi dataset","user":{"login":"ilhamfp","id":31740013,"node_id":"MDQ6VXNlcjMxNzQwMDEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31740013?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ilhamfp","html_url":"https:\/\/github.com\/ilhamfp","followers_url":"https:\/\/api.github.com\/users\/ilhamfp\/followers","following_url":"https:\/\/api.github.com\/users\/ilhamfp\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ilhamfp\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ilhamfp\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ilhamfp\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ilhamfp\/orgs","repos_url":"https:\/\/api.github.com\/users\/ilhamfp\/repos","events_url":"https:\/\/api.github.com\/users\/ilhamfp\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ilhamfp\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-26T12:41:55Z","updated_at":"2020-12-30T16:34:17Z","closed_at":"2020-12-30T16:34:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1638","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1638","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1638.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1638.patch","merged_at":"2020-12-30T16:34:17Z"},"body":"Puisi (poem) is an Indonesian poetic form. The dataset contains 7223 Indonesian puisi with its title and author. 
:)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1638\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1638\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1637","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1637\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1637\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1637\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1637","id":774710014,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ1NTc1NTMw","number":1637,"title":"Added `pn_summary` dataset","user":{"login":"m3hrdadfi","id":2601833,"node_id":"MDQ6VXNlcjI2MDE4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2601833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/m3hrdadfi","html_url":"https:\/\/github.com\/m3hrdadfi","followers_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/followers","following_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/orgs","repos_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/repos","events_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-25T11:01:24Z","updated_at":"2021-01-04T13:43:19Z","closed_at":"2021-01-04T13:43:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1637","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1637","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1637.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1637.patch","merged_at":"2021-01-04T13:43:19Z"},"body":"#1635 \r\n\r\nYou did a great job with the fluent procedure regarding adding a dataset. I took the chance to add the dataset on my own. 
Thank you for your awesome job, and I hope this dataset found the researchers happy, specifically those interested in Persian Language (Farsi)!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1637\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1637\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1636","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1636\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1636\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1636\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1636","id":774574378,"node_id":"MDU6SXNzdWU3NzQ1NzQzNzg=","number":1636,"title":"winogrande cannot be dowloaded ","user":{"login":"ghost","id":10137,"node_id":"MDQ6VXNlcjEwMTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghost","html_url":"https:\/\/github.com\/ghost","followers_url":"https:\/\/api.github.com\/users\/ghost\/followers","following_url":"https:\/\/api.github.com\/users\/ghost\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghost\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghost\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghost\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghost\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghost\/repos","events_url":"https:\/\/api.github.com\/users\/ghost\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghost\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-24T22:28:22Z","updated_at":"2020-12-28T13:53:49Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI am getting this error when trying to run the codes on the cloud. 
Thank you for any suggestion and help on this @lhoestq \r\n\r\n```\r\n File \".\/finetune_trainer.py\", line 318, in \r\n main()\r\n File \".\/finetune_trainer.py\", line 148, in main\r\n for task in data_args.tasks]\r\n File \".\/finetune_trainer.py\", line 148, in \r\n for task in data_args.tasks]\r\n File \"\/workdir\/seq2seq\/data\/tasks.py\", line 65, in get_dataset\r\n dataset = self.load_dataset(split=split)\r\n File \"\/workdir\/seq2seq\/data\/tasks.py\", line 466, in load_dataset\r\n return datasets.load_dataset('winogrande', 'winogrande_l', split=split)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py\", line 589, in load_dataset\r\n path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py\", line 267, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py\", line 487, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/winogrande\/winogrande.py\r\nyo\/0 I1224 14:17:46.419031 31226 main shadow.py:122 > Traceback (most recent call last):\r\n File \"\/usr\/lib\/python3.6\/runpy.py\", line 193, in _run_module_as_main\r\n \"__main__\", mod_spec)\r\n File \"\/usr\/lib\/python3.6\/runpy.py\", line 85, in _run_code\r\n exec(code, run_globals)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/torch\/distributed\/launch.py\", line 260, in \r\n main()\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/torch\/distributed\/launch.py\", line 256, in main\r\n cmd=cmd)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1636\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1636\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1635","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1635\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1635\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1635\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1635","id":774524492,"node_id":"MDU6SXNzdWU3NzQ1MjQ0OTI=","number":1635,"title":"Persian Abstractive\/Extractive Text 
Summarization","user":{"login":"m3hrdadfi","id":2601833,"node_id":"MDQ6VXNlcjI2MDE4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2601833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/m3hrdadfi","html_url":"https:\/\/github.com\/m3hrdadfi","followers_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/followers","following_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/orgs","repos_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/repos","events_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/m3hrdadfi\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-24T17:47:12Z","updated_at":"2021-01-04T15:11:04Z","closed_at":"2021-01-04T15:11:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Assembling datasets tailored to different tasks and languages is a precious target. This would be great to have this dataset included.\r\n\r\n## Adding a Dataset\r\n- **Name:** *pn-summary*\r\n- **Description:** *A well-structured summarization dataset for the Persian language consists of 93,207 records. It is prepared for Abstractive\/Extractive tasks (like cnn_dailymail for English). 
It can also be used in other scopes like Text Generation, Title Generation, and News Category Classification.*\r\n- **Paper:** *https:\/\/arxiv.org\/abs\/2012.11204*\r\n- **Data:** *https:\/\/github.com\/hooshvare\/pn-summary\/#download*\r\n- **Motivation:** *It is the first Persian abstractive\/extractive Text summarization dataset (like cnn_dailymail for English)!*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1635\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1635\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1634","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1634\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1634\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1634\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1634","id":774487934,"node_id":"MDU6SXNzdWU3NzQ0ODc5MzQ=","number":1634,"title":"Inspecting datasets per category","user":{"login":"ghost","id":10137,"node_id":"MDQ6VXNlcjEwMTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghost","html_url":"https:\/\/github.com\/ghost","followers_url":"https:\/\/api.github.com\/users\/ghost\/followers","following_url":"https:\/\/api.github.com\/users\/ghost\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghost\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghost\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghost\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghost\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghost\/repos","events_url":"https:\/\/api.github.com\/users\/ghost\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghost\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-24T15:26:34Z","updated_at":"2021-01-08T09:28:04Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nIs there a way I could get all NLI datasets\/all QA datasets to get some understanding of available datasets per category? 
this is hard for me to inspect the datasets one by one in the webpage, thanks for the suggestions @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1634\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1634\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1633","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1633\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1633\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1633\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1633","id":774422603,"node_id":"MDU6SXNzdWU3NzQ0MjI2MDM=","number":1633,"title":"social_i_qa wrong format of labels","user":{"login":"ghost","id":10137,"node_id":"MDQ6VXNlcjEwMTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghost","html_url":"https:\/\/github.com\/ghost","followers_url":"https:\/\/api.github.com\/users\/ghost\/followers","following_url":"https:\/\/api.github.com\/users\/ghost\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghost\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghost\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghost\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghost\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghost\/repos","events_url":"https:\/\/api.github.com\/users\/ghost\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghost\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-24T13:11:54Z","updated_at":"2020-12-30T17:18:49Z","closed_at":"2020-12-30T17:18:49Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nthere is extra \"\\n\" in labels of social_i_qa datasets, no big deal, but I was wondering if you could remove it to make it consistent.\r\nso label is 'label': '1\\n', not '1'\r\nthanks\r\n\r\n```\r\n>>> import datasets \r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset(\r\n... 
'social_i_qa')\r\ncahce dir \/julia\/cache\/datasets\r\nDownloading: 4.72kB [00:00, 3.52MB\/s] \r\ncahce dir \/julia\/cache\/datasets\r\nDownloading: 2.19kB [00:00, 1.81MB\/s] \r\nUsing custom data configuration default\r\nReusing dataset social_i_qa (\/julia\/datasets\/social_i_qa\/default\/0.1.0\/4a4190cc2d2482d43416c2167c0c5dccdd769d4482e84893614bd069e5c3ba06)\r\n>>> dataset['train'][0]\r\n{'answerA': 'like attending', 'answerB': 'like staying home', 'answerC': 'a good friend to have', 'context': 'Cameron decided to have a barbecue and gathered her friends together.', 'label': '1\\n', 'question': 'How would Others feel as a result?'}\r\n\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1633\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1633\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1632","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1632\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1632\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1632\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1632","id":774388625,"node_id":"MDU6SXNzdWU3NzQzODg2MjU=","number":1632,"title":"SICK dataset ","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-24T12:40:14Z","updated_at":"2021-02-05T15:49:25Z","closed_at":"2021-02-05T15:49:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, this would be great to have this dataset included. I might be missing something, but I could not find it in the list of already included datasets. Thank you. \r\n\r\n## Adding a Dataset\r\n- **Name:** SICK\r\n- **Description:** SICK consists of about 10,000 English sentence pairs that include many examples of the lexical, syntactic, and semantic phenomena. 
\r\n- **Paper:** https:\/\/www.aclweb.org\/anthology\/L14-1314\/\r\n- **Data:** http:\/\/marcobaroni.org\/composes\/sick.html\r\n- **Motivation:** This dataset is well-known in the NLP community used for recognizing entailment between sentences.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1632\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1632\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1631","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1631\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1631\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1631\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1631","id":774349222,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ1Mjc5MTE2","number":1631,"title":"Update README.md","user":{"login":"savasy","id":6584825,"node_id":"MDQ6VXNlcjY1ODQ4MjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6584825?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/savasy","html_url":"https:\/\/github.com\/savasy","followers_url":"https:\/\/api.github.com\/users\/savasy\/followers","following_url":"https:\/\/api.github.com\/users\/savasy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/savasy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/savasy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/savasy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/savasy\/orgs","repos_url":"https:\/\/api.github.com\/users\/savasy\/repos","events_url":"https:\/\/api.github.com\/users\/savasy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/savasy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-24T11:45:52Z","updated_at":"2020-12-28T17:35:41Z","closed_at":"2020-12-28T17:16:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1631","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1631","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1631.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1631.patch","merged_at":"2020-12-28T17:16:04Z"},"body":"I made small change for citation","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1631\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1631\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1630","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1630\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1630\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1630\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1630","id":774332129,"node_id":"MDU6SXNzdWU3NzQzMzIxMjk=","number":1630,"title":"Adding UKP Argument Aspect Similarity Corpus","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-24T11:01:31Z","updated_at":"2020-12-24T11:30:18Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, this would be great to have this dataset included.\r\n\r\n## Adding a Dataset\r\n- **Name:** UKP Argument Aspect Similarity Corpus\r\n- **Description:** The UKP Argument Aspect Similarity Corpus (UKP ASPECT) includes 3,595 sentence pairs over 28 controversial topics. 
Each sentence pair was annotated via crowdsourcing as either \u201chigh similarity\u201d, \u201csome similarity\u201d, \u201cno similarity\u201d or \u201cnot related\u201d with respect to the topic.\r\n- **Paper:** https:\/\/www.aclweb.org\/anthology\/P19-1054\/\r\n- **Data:** https:\/\/tudatalib.ulb.tu-darmstadt.de\/handle\/tudatalib\/1998\r\n- **Motivation:** this is one of the datasets currently used frequently in recent adapter papers like https:\/\/arxiv.org\/pdf\/2005.00247.pdf \r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nThank you","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1630\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1630\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1629","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1629\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1629\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1629\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1629","id":774255716,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ1MjAwNTQ3","number":1629,"title":"add wongnai_reviews test set labels","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-24T08:02:31Z","updated_at":"2020-12-28T17:23:39Z","closed_at":"2020-12-28T17:23:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1629","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1629","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1629.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1629.patch","merged_at":"2020-12-28T17:23:39Z"},"body":"- add test set labels provided by @ekapolc\r\n- refactor `star_rating` to a `datasets.features.ClassLabel` 
field","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1629\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1629\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1628","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1628\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1628\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1628\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1628","id":774091411,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ1MDY5NTAy","number":1628,"title":"made suggested changes to hate-speech-and-offensive-language","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-23T23:25:32Z","updated_at":"2020-12-28T10:11:20Z","closed_at":"2020-12-28T10:11:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1628","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1628","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1628.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1628.patch","merged_at":"2020-12-28T10:11:20Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1628\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1628\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1627","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1627\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1627\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1627\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1627","id":773960255,"node_id":"MDU6SXNzdWU3NzM5NjAyNTU=","number":1627,"title":"`Dataset.map` disable progress bar","user":{"login":"Nickil21","id":8767964,"node_id":"MDQ6VXNlcjg3Njc5NjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8767964?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Nickil21","html_url":"https:\/\/github.com\/Nickil21","followers_url":"https:\/\/api.github.com\/users\/Nickil21\/followers","following_url":"https:\/\/api.github.com\/users\/Nickil21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Nickil21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Nickil21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Nickil21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Nickil21\/orgs","repos_url":"https:\/\/api.github.com\/users\/Nickil21\/repos","events_url":"https:\/\/api.github.com\/users\/Nickil21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Nickil21\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-23T17:53:42Z","updated_at":"2020-12-26T19:57:36Z","closed_at":"2020-12-26T19:57:17Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I can't find anything to turn off the `tqdm` progress bars while running a preprocessing function using `Dataset.map`. I want to do akin to `disable_tqdm=True` in the case of `transformers`. 
Is there something like that?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1627\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1627\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1626","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1626\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1626\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1626\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1626","id":773840368,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ0ODYxMDE4","number":1626,"title":"Fix dataset_dict.shuffle with single seed","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-23T14:33:36Z","updated_at":"2021-01-04T10:00:04Z","closed_at":"2021-01-04T10:00:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1626","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1626","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1626.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1626.patch","merged_at":"2021-01-04T10:00:03Z"},"body":"Fix #1610 \r\n\r\nI added support for single integer used in `DatasetDict.shuffle`. Previously only a dictionary of seed was allowed.\r\nMoreover I added the missing `seed` parameter. 
Previously only `seeds` was allowed.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1626\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1626\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1625","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1625\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1625\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1625\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1625","id":773771596,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ0Nzk4MDM1","number":1625,"title":"Fixed bug in the shape property","user":{"login":"noaonoszko","id":47183162,"node_id":"MDQ6VXNlcjQ3MTgzMTYy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47183162?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/noaonoszko","html_url":"https:\/\/github.com\/noaonoszko","followers_url":"https:\/\/api.github.com\/users\/noaonoszko\/followers","following_url":"https:\/\/api.github.com\/users\/noaonoszko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/noaonoszko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/noaonoszko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/noaonoszko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/noaonoszko\/orgs","repos_url":"https:\/\/api.github.com\/users\/noaonoszko\/repos","events_url":"https:\/\/api.github.com\/users\/noaonoszko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/noaonoszko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-23T13:33:21Z","updated_at":"2021-01-02T23:22:52Z","closed_at":"2020-12-23T14:13:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1625","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1625","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1625.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1625.patch","merged_at":"2020-12-23T14:13:13Z"},"body":"Fix to the bug reported in issue #1622. 
Just replaced `return tuple(self._indices.num_rows, self._data.num_columns)` by `return (self._indices.num_rows, self._data.num_columns)`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1625\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1625\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1624","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1624\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1624\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1624\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1624","id":773669700,"node_id":"MDU6SXNzdWU3NzM2Njk3MDA=","number":1624,"title":"Cannot download ade_corpus_v2","user":{"login":"him1411","id":20259310,"node_id":"MDQ6VXNlcjIwMjU5MzEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20259310?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/him1411","html_url":"https:\/\/github.com\/him1411","followers_url":"https:\/\/api.github.com\/users\/him1411\/followers","following_url":"https:\/\/api.github.com\/users\/him1411\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/him1411\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/him1411\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/him1411\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/him1411\/orgs","repos_url":"https:\/\/api.github.com\/users\/him1411\/repos","events_url":"https:\/\/api.github.com\/users\/him1411\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/him1411\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-23T10:58:14Z","updated_at":"2021-08-03T05:08:54Z","closed_at":"2021-08-03T05:08:54Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I tried this to get the dataset following this url : https:\/\/huggingface.co\/datasets\/ade_corpus_v2\r\n\r\nbut received this error : \r\n\r\n`Traceback (most recent call last):\r\n File \"\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 267, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 486, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/ade_corpus_v2\/ade_corpus_v2.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 278, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File 
\"\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 486, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/ade_corpus_v2\/ade_corpus_v2.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 589, in load_dataset\r\n path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n File \"\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 282, in prepare_module\r\n combined_path, github_file_path, file_path\r\nFileNotFoundError: Couldn't find file locally at ade_corpus_v2\/ade_corpus_v2.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/ade_corpus_v2\/ade_corpus_v2.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/ade_corpus_v2\/ade_corpus_v2.py`\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1624\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1624\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1623","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1623\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1623\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1623\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1623","id":772950710,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ0MTI2ODQ4","number":1623,"title":"Add CLIMATE-FEVER 
dataset","user":{"login":"tdiggelm","id":1658969,"node_id":"MDQ6VXNlcjE2NTg5Njk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1658969?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tdiggelm","html_url":"https:\/\/github.com\/tdiggelm","followers_url":"https:\/\/api.github.com\/users\/tdiggelm\/followers","following_url":"https:\/\/api.github.com\/users\/tdiggelm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tdiggelm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tdiggelm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tdiggelm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tdiggelm\/orgs","repos_url":"https:\/\/api.github.com\/users\/tdiggelm\/repos","events_url":"https:\/\/api.github.com\/users\/tdiggelm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tdiggelm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-22T13:34:05Z","updated_at":"2020-12-22T17:53:53Z","closed_at":"2020-12-22T17:53:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1623","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1623","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1623.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1623.patch","merged_at":"2020-12-22T17:53:53Z"},"body":"As suggested by @SBrandeis , fresh PR that adds CLIMATE-FEVER. Replaces PR #1579.\r\n\r\n---\r\n\r\nA dataset adopting the FEVER methodology that consists of 1,535 real-world claims regarding climate-change collected on the internet. Each claim is accompanied by five manually annotated evidence sentences retrieved from the English Wikipedia that support, refute or do not give enough information to validate the claim totalling in 7,675 claim-evidence pairs. 
The dataset features challenging claims that relate multiple facets and disputed cases of claims where both supporting and refuting evidence are present.\r\n\r\nMore information can be found at:\r\n\r\n* Homepage: http:\/\/climatefever.ai\r\n* Paper: https:\/\/arxiv.org\/abs\/2012.00614","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1623\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1623\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1622","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1622\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1622\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1622\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1622","id":772940768,"node_id":"MDU6SXNzdWU3NzI5NDA3Njg=","number":1622,"title":"Can't call shape on the output of select()","user":{"login":"noaonoszko","id":47183162,"node_id":"MDQ6VXNlcjQ3MTgzMTYy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47183162?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/noaonoszko","html_url":"https:\/\/github.com\/noaonoszko","followers_url":"https:\/\/api.github.com\/users\/noaonoszko\/followers","following_url":"https:\/\/api.github.com\/users\/noaonoszko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/noaonoszko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/noaonoszko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/noaonoszko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/noaonoszko\/orgs","repos_url":"https:\/\/api.github.com\/users\/noaonoszko\/repos","events_url":"https:\/\/api.github.com\/users\/noaonoszko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/noaonoszko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-22T13:18:40Z","updated_at":"2020-12-23T13:37:13Z","closed_at":"2020-12-23T13:37:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I get the error `TypeError: tuple expected at most 1 argument, got 2` when calling `shape` on the output of `select()`.\r\nIt's line 531 in shape in arrow_dataset.py that causes the problem:\r\n``return tuple(self._indices.num_rows, self._data.num_columns)``\r\nThis makes sense, since `tuple(num1, num2)` is not a valid call.\r\n \r\nFull code to reproduce:\r\n\r\n```python\r\ndataset = load_dataset(\"cnn_dailymail\", \"3.0.0\")\r\ntrain_set = dataset[\"train\"]\r\nt = train_set.select(range(10))\r\nprint(t.shape)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1622\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1622\/timeline","performed_via_github_app":null} 
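Issue #1622 above and the one-line fix in PR #1625 both hinge on the fact that Python's `tuple()` constructor takes a single iterable, not two positional numbers. A standalone sketch of the failure and the corrected form (placeholder numbers, independent of the `arrow_dataset.py` internals):

```python
# Standalone illustration of the bug reported in issue #1622 and fixed in PR #1625.
num_rows, num_columns = 10, 3

try:
    shape = tuple(num_rows, num_columns)   # what the old `shape` property did
except TypeError as err:
    print(err)                             # "tuple expected at most 1 argument, got 2"

# Either of these works, which is what the fix switched to:
shape = (num_rows, num_columns)
assert shape == tuple((num_rows, num_columns)) == (10, 3)
print(shape)
```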
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1621","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1621\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1621\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1621\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1621","id":772940417,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQ0MTE4MTAz","number":1621,"title":"updated dutch_social.py for loading jsonl (lines instead of list) files","user":{"login":"skyprince999","id":9033954,"node_id":"MDQ6VXNlcjkwMzM5NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9033954?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/skyprince999","html_url":"https:\/\/github.com\/skyprince999","followers_url":"https:\/\/api.github.com\/users\/skyprince999\/followers","following_url":"https:\/\/api.github.com\/users\/skyprince999\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/skyprince999\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/skyprince999\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/skyprince999\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/skyprince999\/orgs","repos_url":"https:\/\/api.github.com\/users\/skyprince999\/repos","events_url":"https:\/\/api.github.com\/users\/skyprince999\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/skyprince999\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-22T13:18:11Z","updated_at":"2020-12-23T11:51:51Z","closed_at":"2020-12-23T11:51:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1621","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1621","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1621.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1621.patch","merged_at":"2020-12-23T11:51:51Z"},"body":"the data_loader is modified to load files on the fly. 
Earlier it was reading the entire file and then processing the records\r\n\r\nPls refer to previous PR #1321 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1621\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1621\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1620","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1620\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1620\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1620\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1620","id":772620056,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQzODUxNTY3","number":1620,"title":"Adding myPOS2017 dataset","user":{"login":"hungluumfc","id":69781878,"node_id":"MDQ6VXNlcjY5NzgxODc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69781878?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hungluumfc","html_url":"https:\/\/github.com\/hungluumfc","followers_url":"https:\/\/api.github.com\/users\/hungluumfc\/followers","following_url":"https:\/\/api.github.com\/users\/hungluumfc\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hungluumfc\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hungluumfc\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hungluumfc\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hungluumfc\/orgs","repos_url":"https:\/\/api.github.com\/users\/hungluumfc\/repos","events_url":"https:\/\/api.github.com\/users\/hungluumfc\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hungluumfc\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-22T04:04:55Z","updated_at":"2021-01-29T10:23:37Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1620","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1620","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1620.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1620.patch","merged_at":null},"body":"myPOS Corpus (Myanmar Part-of-Speech Corpus) for Myanmar language NLP Research and Developments","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1620\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1620\/timeline","performed_via_github_app":null} 
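PR #1621 above switches the dutch_social loader from reading the entire JSON file into memory to processing records on the fly. A generic sketch of that line-by-line JSON-lines pattern (placeholder path and field names, not the real dutch_social schema):

```python
# Generic sketch of the change described in PR #1621: stream a JSON-lines
# file record by record instead of loading one big JSON list into memory.
import json
from typing import Dict, Iterator


def iter_jsonl(path: str) -> Iterator[Dict]:
    """Yield one record per line, so the full file never sits in memory."""
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if line:  # skip blank lines
                yield json.loads(line)


# Usage (e.g. inside a _generate_examples method); file name and keys are placeholders:
# for idx, record in enumerate(iter_jsonl("dutch_social_train.jsonl")):
#     yield idx, {"text": record.get("text", ""), "label": record.get("label")}
```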
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1619","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1619\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1619\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1619\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1619","id":772508558,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQzNzYyMTUw","number":1619,"title":"data loader for reading comprehension task","user":{"login":"songfeng","id":2062185,"node_id":"MDQ6VXNlcjIwNjIxODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2062185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/songfeng","html_url":"https:\/\/github.com\/songfeng","followers_url":"https:\/\/api.github.com\/users\/songfeng\/followers","following_url":"https:\/\/api.github.com\/users\/songfeng\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/songfeng\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/songfeng\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/songfeng\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/songfeng\/orgs","repos_url":"https:\/\/api.github.com\/users\/songfeng\/repos","events_url":"https:\/\/api.github.com\/users\/songfeng\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/songfeng\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-21T22:40:34Z","updated_at":"2020-12-28T10:32:53Z","closed_at":"2020-12-28T10:32:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1619","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1619","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1619.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1619.patch","merged_at":"2020-12-28T10:32:53Z"},"body":"added doc2dial data loader and dummy data for reading comprehension task.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1619\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1619\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1618","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1618\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1618\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1618\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1618","id":772248730,"node_id":"MDU6SXNzdWU3NzIyNDg3MzA=","number":1618,"title":"Can't filter language:EN on 
https:\/\/huggingface.co\/datasets","user":{"login":"davidefiocco","id":4547987,"node_id":"MDQ6VXNlcjQ1NDc5ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4547987?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/davidefiocco","html_url":"https:\/\/github.com\/davidefiocco","followers_url":"https:\/\/api.github.com\/users\/davidefiocco\/followers","following_url":"https:\/\/api.github.com\/users\/davidefiocco\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/davidefiocco\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/davidefiocco\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/davidefiocco\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/davidefiocco\/orgs","repos_url":"https:\/\/api.github.com\/users\/davidefiocco\/repos","events_url":"https:\/\/api.github.com\/users\/davidefiocco\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/davidefiocco\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-21T15:23:23Z","updated_at":"2020-12-22T17:17:00Z","closed_at":"2020-12-22T17:16:09Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When visiting https:\/\/huggingface.co\/datasets, I don't see an obvious way to filter only English datasets. This is unexpected for me, am I missing something? I'd expect English to be selectable in the language widget. This problem reproduced on Mozilla Firefox and MS Edge:\r\n\r\n![screenshot](https:\/\/user-images.githubusercontent.com\/4547987\/102792244-892e1f00-43a8-11eb-9e89-4826ca201a87.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1618\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1618\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1617","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1617\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1617\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1617\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1617","id":772084764,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQzNDE5MTM5","number":1617,"title":"cifar10 initial 
commit","user":{"login":"czabo","id":75574105,"node_id":"MDQ6VXNlcjc1NTc0MTA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75574105?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/czabo","html_url":"https:\/\/github.com\/czabo","followers_url":"https:\/\/api.github.com\/users\/czabo\/followers","following_url":"https:\/\/api.github.com\/users\/czabo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/czabo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/czabo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/czabo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/czabo\/orgs","repos_url":"https:\/\/api.github.com\/users\/czabo\/repos","events_url":"https:\/\/api.github.com\/users\/czabo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/czabo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-21T11:18:50Z","updated_at":"2020-12-22T10:18:05Z","closed_at":"2020-12-22T10:11:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1617","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1617","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1617.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1617.patch","merged_at":"2020-12-22T10:11:28Z"},"body":"CIFAR-10 dataset. Didn't add the tagging since there are no vision related tags.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1617\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1617\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1616","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1616\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1616\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1616\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1616","id":772074229,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQzNDEwNDc1","number":1616,"title":"added TurkishMovieSentiment 
dataset","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-21T11:03:16Z","updated_at":"2020-12-24T07:08:41Z","closed_at":"2020-12-23T16:50:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1616","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1616","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1616.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1616.patch","merged_at":"2020-12-23T16:50:06Z"},"body":"This PR adds the **TurkishMovieSentiment: This dataset contains turkish movie reviews.**\r\n\r\n- **Homepage:** [https:\/\/www.kaggle.com\/mustfkeskin\/turkish-movie-sentiment-analysis-dataset\/tasks](https:\/\/www.kaggle.com\/mustfkeskin\/turkish-movie-sentiment-analysis-dataset\/tasks)\r\n- **Point of Contact:** [Mustafa Keskin](https:\/\/www.linkedin.com\/in\/mustfkeskin\/)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1616\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1616\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1615","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1615\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1615\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1615\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1615","id":771641088,"node_id":"MDU6SXNzdWU3NzE2NDEwODg=","number":1615,"title":"Bug: Can't download TriviaQA with `load_dataset` - custom 
`cache_dir`","user":{"login":"SapirWeissbuch","id":44585792,"node_id":"MDQ6VXNlcjQ0NTg1Nzky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44585792?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SapirWeissbuch","html_url":"https:\/\/github.com\/SapirWeissbuch","followers_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/followers","following_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/orgs","repos_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/repos","events_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-12-20T17:27:38Z","updated_at":"2021-06-25T13:11:33Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello,\r\nI'm having issue downloading TriviaQA dataset with `load_dataset`.\r\n\r\n## Environment info\r\n- `datasets` version: 1.1.3\r\n- Platform: Linux-4.19.129-aufs-1-x86_64-with-debian-10.1\r\n- Python version: 3.7.3\r\n\r\n## The code I'm running:\r\n```python\r\nimport datasets\r\ndataset = datasets.load_dataset(\"trivia_qa\", \"rc\", cache_dir = \".\/datasets\")\r\n```\r\n\r\n## The output:\r\n1. Download begins:\r\n```\r\nDownloading and preparing dataset trivia_qa\/rc (download: 2.48 GiB, generated: 14.92 GiB, post-processed: Unknown size, total: 17.40 GiB) to \/cs\/labs\/gabis\/sapirweissbuch\/tr\r\nivia_qa\/rc\/1.1.0\/e734e28133f4d9a353af322aa52b9f266f6f27cbf2f072690a1694e577546b0d... \r\nDownloading: 17%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 446M\/2.67G [00:37<04:45, 7.77MB\/s]\r\n```\r\n2. 100% is reached\r\n3. It got stuck here for about an hour, and added additional 30G of data to \".\/datasets\" directory. 
I killed the process eventually.\r\n\r\nA similar issue can be observed in Google Colab:\r\n\r\nhttps:\/\/colab.research.google.com\/drive\/1nn1Lw02GhfGFylzbS2j6yksGjPo7kkN-?usp=sharing\r\n\r\n## Expected behaviour:\r\nThe dataset \"TriviaQA\" should be successfully downloaded.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1615\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1615\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1613","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1613\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1613\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1613\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1613","id":771577050,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQzMDYwNzEx","number":1613,"title":"Add id_clickbait","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-20T12:24:49Z","updated_at":"2020-12-22T17:45:27Z","closed_at":"2020-12-22T17:45:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1613","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1613","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1613.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1613.patch","merged_at":"2020-12-22T17:45:27Z"},"body":"This is the CLICK-ID dataset, a collection of annotated clickbait Indonesian news headlines that was collected from 12 local online news ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1613\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1613\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1612","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1612\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1612\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1612\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1612","id":771558160,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQzMDQ3NjQ1","number":1612,"title":"Adding wiki asp dataset as new PR","user":{"login":"katnoria","id":7674948,"node_id":"MDQ6VXNlcjc2NzQ5NDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7674948?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/katnoria","html_url":"https:\/\/github.com\/katnoria","followers_url":"https:\/\/api.github.com\/users\/katnoria\/followers","following_url":"https:\/\/api.github.com\/users\/katnoria\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/katnoria\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/katnoria\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/katnoria\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/katnoria\/orgs","repos_url":"https:\/\/api.github.com\/users\/katnoria\/repos","events_url":"https:\/\/api.github.com\/users\/katnoria\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/katnoria\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-20T10:25:08Z","updated_at":"2020-12-21T14:13:33Z","closed_at":"2020-12-21T14:13:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1612","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1612","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1612.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1612.patch","merged_at":"2020-12-21T14:13:33Z"},"body":"Hi @lhoestq, Adding wiki asp as new branch because #1539 has other commits. 
This version has dummy data for each domain <20\/30KB.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1612\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1612\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1611","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1611\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1611\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1611\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1611","id":771486456,"node_id":"MDU6SXNzdWU3NzE0ODY0NTY=","number":1611,"title":"shuffle with torch generator ","user":{"login":"rabeehkarimimahabadi","id":73364383,"node_id":"MDQ6VXNlcjczMzY0Mzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73364383?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi","html_url":"https:\/\/github.com\/rabeehkarimimahabadi","followers_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-12-20T00:57:14Z","updated_at":"2020-12-21T18:12:19Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI need to shuffle mutliple large datasets with `generator = torch.Generator()` for a distributed sampler which needs to make sure datasets are consistent across different cores, for this, this is really necessary for me to use torch generator, based on documentation this generator is not supported with datasets, I really need to make shuffle work with this generator and I was wondering what I can do about this issue, thanks for your help \r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1611\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1611\/timeline","performed_via_github_app":null} 
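Issue #1611 above asks for `shuffle` to accept a `torch.Generator`. Since `Dataset.shuffle` takes an integer `seed` (or a NumPy generator), one possible workaround, sketched below under the assumption that every worker shares the same torch generator state, is to draw the per-epoch seed from that generator and pass it on; this is only an illustration, not an officially supported interface:

```python
# Hedged workaround sketch for issue #1611: derive an int seed from a shared
# torch.Generator and feed it to Dataset.shuffle. The epoch loop and the
# choice of dataset are assumptions about the training setup.
import torch
from datasets import load_dataset

g = torch.Generator()
g.manual_seed(1234)  # same manual seed on every worker/core

dataset = load_dataset("snli", split="train")  # any dataset works here

for epoch in range(3):
    # All workers advance the generator identically, so they derive the same
    # per-epoch seed and therefore shuffle into the same order.
    epoch_seed = int(torch.randint(0, 2**31 - 1, (1,), generator=g).item())
    shuffled = dataset.shuffle(seed=epoch_seed)
```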
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1610","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1610\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1610\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1610\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1610","id":771453599,"node_id":"MDU6SXNzdWU3NzE0NTM1OTk=","number":1610,"title":"shuffle does not accept seed ","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-19T20:59:39Z","updated_at":"2021-01-04T10:00:03Z","closed_at":"2021-01-04T10:00:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI need to shuffle the dataset, but this needs to be based on epoch+seed to be consistent across the cores, when I pass seed to shuffle, this does not accept seed, could you assist me with this? 
thanks @lhoestq\r\n ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1610\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1610\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1609","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1609\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1609\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1609\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1609","id":771421881,"node_id":"MDU6SXNzdWU3NzE0MjE4ODE=","number":1609,"title":"Not able to use 'jigsaw_toxicity_pred' dataset","user":{"login":"jassimran","id":7424133,"node_id":"MDQ6VXNlcjc0MjQxMzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7424133?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jassimran","html_url":"https:\/\/github.com\/jassimran","followers_url":"https:\/\/api.github.com\/users\/jassimran\/followers","following_url":"https:\/\/api.github.com\/users\/jassimran\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jassimran\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jassimran\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jassimran\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jassimran\/orgs","repos_url":"https:\/\/api.github.com\/users\/jassimran\/repos","events_url":"https:\/\/api.github.com\/users\/jassimran\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jassimran\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-19T17:35:48Z","updated_at":"2020-12-22T16:42:24Z","closed_at":"2020-12-22T16:42:23Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":" When trying to use jigsaw_toxicity_pred dataset, like this in a [colab](https:\/\/colab.research.google.com\/drive\/1LwO2A5M2X5dvhkAFYE4D2CUT3WUdWnkn?usp=sharing):\r\n```\r\nfrom datasets import list_datasets, list_metrics, load_dataset, load_metric\r\n\r\nds = load_dataset(\"jigsaw_toxicity_pred\")\r\n```\r\n \r\nI see below error:\r\n\r\n> FileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/jigsaw_toxicity_pred\/jigsaw_toxicity_pred.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nFileNotFoundError Traceback (most recent call last)\r\nFileNotFoundError: Couldn't find file at https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/jigsaw_toxicity_pred\/jigsaw_toxicity_pred.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nFileNotFoundError Traceback (most recent call last)\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, **download_kwargs)\r\n 280 raise FileNotFoundError(\r\n 281 \"Couldn't find file locally at {}, or remotely at {} or {}\".format(\r\n--> 282 combined_path, github_file_path, 
file_path\r\n 283 )\r\n 284 )\r\n\r\nFileNotFoundError: Couldn't find file locally at jigsaw_toxicity_pred\/jigsaw_toxicity_pred.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/jigsaw_toxicity_pred\/jigsaw_toxicity_pred.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/jigsaw_toxicity_pred\/jigsaw_toxicity_pred.py","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1609\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1609\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1608","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1608\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1608\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1608\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1608","id":771329434,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQyODkyMTQ4","number":1608,"title":"adding ted_talks_iwslt","user":{"login":"skyprince999","id":9033954,"node_id":"MDQ6VXNlcjkwMzM5NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9033954?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/skyprince999","html_url":"https:\/\/github.com\/skyprince999","followers_url":"https:\/\/api.github.com\/users\/skyprince999\/followers","following_url":"https:\/\/api.github.com\/users\/skyprince999\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/skyprince999\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/skyprince999\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/skyprince999\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/skyprince999\/orgs","repos_url":"https:\/\/api.github.com\/users\/skyprince999\/repos","events_url":"https:\/\/api.github.com\/users\/skyprince999\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/skyprince999\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-19T07:36:41Z","updated_at":"2021-01-02T15:44:12Z","closed_at":"2021-01-02T15:44:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1608","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1608","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1608.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1608.patch","merged_at":null},"body":"UPDATE2: (2nd Jan) Wrote a long writeup on the slack channel. I don't think this approach is correct. Basically this created language pairs (109*108) \r\nRunning the `pytest `went for more than 40+ hours and it was still running! \r\nSo working on a different approach, such that the number of configs = number of languages. Will make a new pull request with that. 
\r\n\r\nUPDATE: This requires manual download dataset\r\n\r\nThis is a draft version ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1608\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1608\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1607","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1607\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1607\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1607\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1607","id":771325852,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQyODg5OTky","number":1607,"title":"modified tweets hate speech detection","user":{"login":"darshan-gandhi","id":44197177,"node_id":"MDQ6VXNlcjQ0MTk3MTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44197177?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/darshan-gandhi","html_url":"https:\/\/github.com\/darshan-gandhi","followers_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/followers","following_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/orgs","repos_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/repos","events_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-19T07:13:40Z","updated_at":"2020-12-21T16:08:48Z","closed_at":"2020-12-21T16:08:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1607","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1607","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1607.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1607.patch","merged_at":"2020-12-21T16:08:48Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1607\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1607\/timeline","performed_via_github_app":null} 
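Regarding the jigsaw_toxicity_pred report in issue 1609 above: the error suggests the script is not present in the pinned 1.1.3 release tree, and the dataset also needs a manual download. A hedged loading sketch follows; the local path is a placeholder, and `script_version="master"` is the loader option visible elsewhere in these reports, not something confirmed in issue 1609 itself.

```python
# Hedged sketch for issue 1609: load the dataset script from the master branch and
# point data_dir at the manually downloaded Jigsaw files. The path is a placeholder.
from datasets import load_dataset

ds = load_dataset(
    "jigsaw_toxicity_pred",
    data_dir="/path/to/jigsaw-toxic-comment-classification-challenge",
    script_version="master",  # assumption: the script is missing from the 1.1.3 release
)
print(ds)
```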
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1606","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1606\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1606\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1606\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1606","id":771116455,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQyNzMwNTEw","number":1606,"title":"added Semantic Scholar Open Research Corpus","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-18T19:21:24Z","updated_at":"2021-02-03T09:30:59Z","closed_at":"2021-02-03T09:30:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1606","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1606","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1606.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1606.patch","merged_at":"2021-02-03T09:30:59Z"},"body":"I picked up this dataset [Semantic Scholar Open Research Corpus](https:\/\/allenai.org\/data\/s2orc) but it contains 6000 files to be downloaded. I tried the current code with 100 files and it worked fine (took ~15GB space). For 6000 files it would occupy ~900GB space which I don\u2019t have. 
Can someone from the HF team with that much of disk space help me with generate dataset_infos and dummy_data?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1606\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1606\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1605","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1605\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1605\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1605\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1605","id":770979620,"node_id":"MDU6SXNzdWU3NzA5Nzk2MjA=","number":1605,"title":"Navigation version breaking","user":{"login":"mttk","id":3007947,"node_id":"MDQ6VXNlcjMwMDc5NDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3007947?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mttk","html_url":"https:\/\/github.com\/mttk","followers_url":"https:\/\/api.github.com\/users\/mttk\/followers","following_url":"https:\/\/api.github.com\/users\/mttk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mttk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mttk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mttk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mttk\/orgs","repos_url":"https:\/\/api.github.com\/users\/mttk\/repos","events_url":"https:\/\/api.github.com\/users\/mttk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mttk\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-18T15:36:24Z","updated_at":"2020-12-18T15:41:52Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, \r\n\r\nwhen navigating docs (Chrome, Ubuntu) (e.g. on this page: https:\/\/huggingface.co\/docs\/datasets\/loading_metrics.html#using-a-custom-metric-script) the version control dropdown has the wrong string displayed as the current version: \r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/3007947\/102632187-02cad080-414f-11eb-813b-28f3c8d80def.png)\r\n\r\n**Edit:** this actually happens _only_ if you open a link to a concrete subsection.\r\n\r\nIMO, the best way to fix this without getting too deep into the intricacies of retrieving version numbers from the URL would be to change [this](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/docs\/source\/_static\/js\/custom.js#L112) line to:\r\n```\r\nlet label = (version in versionMapping) ? version : stableVersion\r\n```\r\nwhich delegates the check to the (already maintained) keys of the version mapping dictionary & should be more robust. 
There's a similar ternary expression [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/docs\/source\/_static\/js\/custom.js#L97) which should also fail in this case.\r\n\r\nI'd also suggest swapping this [block](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/docs\/source\/_static\/js\/custom.js#L80-L90) to `string.contains(version) for version in versionMapping` which might be more robust. I'd add a PR myself but I'm by no means competent in JS :)\r\n\r\nI also have a side question wrt. docs versioning: I'm trying to make docs for a project which are versioned alike to your dropdown versioning. I was wondering how do you handle storage of multiple doc versions on your server? Do you update what `https:\/\/huggingface.co\/docs\/datasets` points to for every stable release & manually create new folders for each released version?\r\nSo far I'm building & publishing (scping) the docs to the server with a github action which works well for a single version, but would ideally need to reorder the public files triggered on a new release.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1605\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1605\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1604","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1604\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1604\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1604\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1604","id":770862112,"node_id":"MDU6SXNzdWU3NzA4NjIxMTI=","number":1604,"title":"Add tests for the download functions ?","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-18T12:49:25Z","updated_at":"2020-12-18T12:49:25Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"AFAIK the 
download functions in `DownloadManager` are not tested yet. It could be good to add some to ensure behavior is as expected.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1604\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1604\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1603","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1603\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1603\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1603\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1603","id":770857221,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQyNTIwNDkx","number":1603,"title":"Add retries to HTTP requests","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-18T12:41:31Z","updated_at":"2020-12-22T15:34:07Z","closed_at":"2020-12-22T15:34:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1603","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1603","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1603.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1603.patch","merged_at":"2020-12-22T15:34:06Z"},"body":"## What does this PR do ?\r\n\r\nAdding retries to HTTP GET & HEAD requests, when they fail with a `ConnectTimeout` exception.\r\n\r\nThe \"canonical\" way to do this is to use [urllib's Retry class](https:\/\/urllib3.readthedocs.io\/en\/latest\/reference\/urllib3.util.html#urllib3.util.Retry) and wrap it in a [HttpAdapter](https:\/\/requests.readthedocs.io\/en\/master\/api\/#requests.adapters.HTTPAdapter). Seems a bit overkill to me, plus it forces us to use the `requests.Session` object. I prefer this simpler implementation. 
I'm open to remarks and suggestions @lhoestq @yjernite \r\n\r\nFixes #1102 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1603\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1603\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1602","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1602\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1602\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1602\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1602","id":770841810,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQyNTA4NTM4","number":1602,"title":"second update of id_newspapers_2018","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-18T12:16:37Z","updated_at":"2020-12-22T10:41:15Z","closed_at":"2020-12-22T10:41:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1602","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1602","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1602.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1602.patch","merged_at":"2020-12-22T10:41:14Z"},"body":"The feature \"url\" is currently set wrongly to data[\"date\"], this PR fix it to data[\"url\"].\r\nI added also an additional POC.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1602\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1602\/timeline","performed_via_github_app":null} 
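For context on the trade-off described in PR 1603 above, here is a rough sketch of the "canonical" alternative it mentions: urllib3's `Retry` wrapped in an `HTTPAdapter` and mounted on a `requests.Session`. The retry counts and status codes below are illustrative assumptions, not values from the PR.

```python
# Sketch of the Retry + HTTPAdapter approach mentioned (and set aside as overkill) in
# PR 1603. All parameter values here are illustrative assumptions.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

def make_retrying_session(total: int = 3, backoff_factor: float = 0.5) -> requests.Session:
    """Return a requests.Session that retries failed requests."""
    retry = Retry(total=total, backoff_factor=backoff_factor,
                  status_forcelist=[500, 502, 503, 504])
    adapter = HTTPAdapter(max_retries=retry)
    session = requests.Session()
    session.mount("https://", adapter)
    session.mount("http://", adapter)
    return session

session = make_retrying_session()
response = session.head("https://huggingface.co", timeout=10)
print(response.status_code)
```

The drawback the PR points out is visible here: the retry logic is tied to a `Session` object, whereas the merged change keeps the existing plain GET/HEAD calls.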
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1601","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1601\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1601\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1601\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1601","id":770758914,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQyNDQzNDE3","number":1601,"title":"second update of the id_newspapers_2018","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-18T10:10:20Z","updated_at":"2020-12-18T12:15:31Z","closed_at":"2020-12-18T12:15:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1601","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1601","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1601.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1601.patch","merged_at":null},"body":"The feature \"url\" is currently set wrongly to data[\"date\"], this PR fix it to data[\"url\"].\r\nI added also an additional POC.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1601\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1601\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1600","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1600\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1600\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1600\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1600","id":770582960,"node_id":"MDU6SXNzdWU3NzA1ODI5NjA=","number":1600,"title":"AttributeError: 'DatasetDict' object has no attribute 
'train_test_split'","user":{"login":"david-waterworth","id":5028974,"node_id":"MDQ6VXNlcjUwMjg5NzQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5028974?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/david-waterworth","html_url":"https:\/\/github.com\/david-waterworth","followers_url":"https:\/\/api.github.com\/users\/david-waterworth\/followers","following_url":"https:\/\/api.github.com\/users\/david-waterworth\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/david-waterworth\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/david-waterworth\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/david-waterworth\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/david-waterworth\/orgs","repos_url":"https:\/\/api.github.com\/users\/david-waterworth\/repos","events_url":"https:\/\/api.github.com\/users\/david-waterworth\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/david-waterworth\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-18T05:37:10Z","updated_at":"2021-06-15T11:25:46Z","closed_at":"2020-12-21T07:38:58Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The following code fails with \"'DatasetDict' object has no attribute 'train_test_split'\" - am I doing something wrong?\r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('csv', data_files='data.txt')\r\ndataset = dataset.train_test_split(test_size=0.1)\r\n```\r\n\r\n> AttributeError: 'DatasetDict' object has no attribute 'train_test_split'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1600\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1600\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1599","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1599\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1599\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1599\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1599","id":770431389,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQyMTgwMzI4","number":1599,"title":"add Korean Sarcasm 
Dataset","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-17T22:49:56Z","updated_at":"2021-09-17T16:54:32Z","closed_at":"2020-12-23T17:25:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1599","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1599","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1599.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1599.patch","merged_at":"2020-12-23T17:25:59Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1599\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1599\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1598","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1598\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1598\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1598\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1598","id":770332440,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQyMDk2NTM4","number":1598,"title":"made suggested changes in 
fake-news-english","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-17T20:06:29Z","updated_at":"2020-12-18T09:43:58Z","closed_at":"2020-12-18T09:43:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1598","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1598","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1598.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1598.patch","merged_at":"2020-12-18T09:43:57Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1598\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1598\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1597","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1597\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1597\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1597\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1597","id":770276140,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQyMDUwMTc5","number":1597,"title":"adding 
hate-speech-and-offensive-language","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-17T18:35:15Z","updated_at":"2020-12-23T23:27:17Z","closed_at":"2020-12-23T23:27:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1597","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1597","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1597.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1597.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1597\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1597\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1596","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1596\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1596\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1596\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1596","id":770260531,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQyMDM3NTU0","number":1596,"title":"made suggested changes to 
hate-speech-and-offensive-language","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-17T18:09:26Z","updated_at":"2020-12-17T18:36:02Z","closed_at":"2020-12-17T18:35:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1596","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1596","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1596.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1596.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1596\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1596\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1595","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1595\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1595\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1595\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1595","id":770153693,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQxOTUwNDk4","number":1595,"title":"Logiqa 
en","user":{"login":"aclifton314","id":53267795,"node_id":"MDQ6VXNlcjUzMjY3Nzk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53267795?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aclifton314","html_url":"https:\/\/github.com\/aclifton314","followers_url":"https:\/\/api.github.com\/users\/aclifton314\/followers","following_url":"https:\/\/api.github.com\/users\/aclifton314\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aclifton314\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aclifton314\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aclifton314\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aclifton314\/orgs","repos_url":"https:\/\/api.github.com\/users\/aclifton314\/repos","events_url":"https:\/\/api.github.com\/users\/aclifton314\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aclifton314\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-12-17T15:42:00Z","updated_at":"2021-02-10T18:33:12Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1595","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1595","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1595.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1595.patch","merged_at":null},"body":"logiqa in english.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1595\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1595\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1594","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1594\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1594\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1594\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1594","id":769747767,"node_id":"MDU6SXNzdWU3Njk3NDc3Njc=","number":1594,"title":"connection error 
","user":{"login":"rabeehkarimimahabadi","id":73364383,"node_id":"MDQ6VXNlcjczMzY0Mzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73364383?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi","html_url":"https:\/\/github.com\/rabeehkarimimahabadi","followers_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-17T09:18:34Z","updated_at":"2020-12-24T22:57:33Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am hitting to this error, thanks \r\n\r\n```\r\n> Traceback (most recent call last):\r\n File \"finetune_t5_trainer.py\", line 379, in \r\n main()\r\n File \"finetune_t5_trainer.py\", line 208, in main\r\n if training_args.do_eval or training_args.evaluation_strategy != EvaluationStrategy.NO\r\n File \"finetune_t5_trainer.py\", line 207, in \r\n for task in data_args.eval_tasks}\r\n File \"\/workdir\/seq2seq\/data\/tasks.py\", line 70, in get_dataset\r\n dataset = self.load_dataset(split=split)\r\n File \"\/workdir\/seq2seq\/data\/tasks.py\", line 66, in load_dataset\r\n return datasets.load_dataset(self.task.name, split=split, script_version=\"master\")\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py\", line 589, in load_dataset\r\n path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py\", line 267, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py\", line 487, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/boolq\/boolq.py\r\nel\/0 I1217 01:11:33.898849 354161 main shadow.py:210 Current job status: FINISHED\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1594\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1594\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1593","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1593\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1593\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1593\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1593","id":769611386,"node_id":"MDU6SXNzdWU3Njk2MTEzODY=","number":1593,"title":"Access to key in DatasetDict map","user":{"login":"ZhaofengWu","id":11954789,"node_id":"MDQ6VXNlcjExOTU0Nzg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11954789?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZhaofengWu","html_url":"https:\/\/github.com\/ZhaofengWu","followers_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/followers","following_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/repos","events_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-17T07:02:20Z","updated_at":"2021-01-13T10:38:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It is possible that we want to do different things in the `map` function (and possibly other functions too) of a `DatasetDict`, depending on the key. I understand that `DatasetDict.map` is a really thin wrapper of `Dataset.map`, so it is easy to directly implement this functionality in the client code. 
Still, it'd be nice if there can be a flag, similar to `with_indices`, that allows the callable to know the key inside `DatasetDict`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1593\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1593\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1592","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1592\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1592\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1592\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1592","id":769529421,"node_id":"MDU6SXNzdWU3Njk1Mjk0MjE=","number":1592,"title":"Using datasets.Metric with Trainer()","user":{"login":"YipingNUS","id":5652584,"node_id":"MDQ6VXNlcjU2NTI1ODQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5652584?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/YipingNUS","html_url":"https:\/\/github.com\/YipingNUS","followers_url":"https:\/\/api.github.com\/users\/YipingNUS\/followers","following_url":"https:\/\/api.github.com\/users\/YipingNUS\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/YipingNUS\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/YipingNUS\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/YipingNUS\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/YipingNUS\/orgs","repos_url":"https:\/\/api.github.com\/users\/YipingNUS\/repos","events_url":"https:\/\/api.github.com\/users\/YipingNUS\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/YipingNUS\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-17T05:17:04Z","updated_at":"2020-12-17T11:49:04Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Using datasets.Metric with Trainer()\r\n\r\nHi team, I was quite surprised in the [Metric documentation](https:\/\/huggingface.co\/docs\/datasets\/using_metrics.html) I don't see how it can be used with `Trainer()`. That would be the most intuitive use case instead of having to iterate the batches and add predictions and references to the metric, then compute the metric manually. Ideally, any pre-built metrics can be added to `compute_metrics` argument of `Trainer()` and they will be calculated at an interval specified by `TrainingArguments.evaluation_strategy`. \r\n\r\nIs this option available but just not mentioned in the documentation or it's not possible at the moment? 
I notice in the [Transformer | Training and fine-tuning](https:\/\/huggingface.co\/transformers\/training.html) tutorial, you are using custom scripts to calculate the accuracy, P\/R\/F, which are already in the pre-built metrics.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1592\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1592\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1591","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1591\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1591\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1591\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1591","id":769383714,"node_id":"MDU6SXNzdWU3NjkzODM3MTQ=","number":1591,"title":"IWSLT-17 Link Broken","user":{"login":"ZhaofengWu","id":11954789,"node_id":"MDQ6VXNlcjExOTU0Nzg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11954789?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZhaofengWu","html_url":"https:\/\/github.com\/ZhaofengWu","followers_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/followers","following_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/repos","events_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZhaofengWu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892865,"node_id":"MDU6TGFiZWwxOTM1ODkyODY1","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/duplicate","name":"duplicate","color":"cfd3d7","default":true,"description":"This issue or pull request already exists"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-17T00:46:42Z","updated_at":"2020-12-18T08:06:36Z","closed_at":"2020-12-18T08:05:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\nFileNotFoundError: Couldn't find file at https:\/\/wit3.fbk.eu\/archive\/2017-01-trnmted\/\/texts\/DeEnItNlRo\/DeEnItNlRo\/DeEnItNlRo-DeEnItNlRo.tgz\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1591\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1591\/timeline","performed_via_github_app":null} 
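As a sketch of what issue 1592 above is asking about: a pre-built datasets metric can be plugged into `Trainer` through its `compute_metrics` callback, which receives the model's predictions and labels at each evaluation. The argmax over logits assumes a standard classification head; the `Trainer` wiring is left as a comment because the surrounding training setup is not part of the report.

```python
# Hedged sketch for issue 1592: wrap a pre-built datasets metric in a function that
# Trainer can call via compute_metrics. Assumes a classification model whose
# predictions are logits to be argmax-ed.
import numpy as np
from datasets import load_metric

accuracy = load_metric("accuracy")

def compute_metrics(eval_pred):
    logits, labels = eval_pred.predictions, eval_pred.label_ids
    predictions = np.argmax(logits, axis=-1)
    return accuracy.compute(predictions=predictions, references=labels)

# trainer = Trainer(model=model, args=training_args,
#                   compute_metrics=compute_metrics,
#                   train_dataset=train_ds, eval_dataset=eval_ds)
```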
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1590","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1590\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1590\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1590\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1590","id":769242858,"node_id":"MDU6SXNzdWU3NjkyNDI4NTg=","number":1590,"title":"Add helper to resolve namespace collision","user":{"login":"jramapuram","id":8204807,"node_id":"MDQ6VXNlcjgyMDQ4MDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8204807?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jramapuram","html_url":"https:\/\/github.com\/jramapuram","followers_url":"https:\/\/api.github.com\/users\/jramapuram\/followers","following_url":"https:\/\/api.github.com\/users\/jramapuram\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jramapuram\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jramapuram\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jramapuram\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jramapuram\/orgs","repos_url":"https:\/\/api.github.com\/users\/jramapuram\/repos","events_url":"https:\/\/api.github.com\/users\/jramapuram\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jramapuram\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-16T20:17:24Z","updated_at":"2020-12-19T03:40:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Many projects use a module called `datasets`, however this is incompatible with huggingface datasets. It would be great if there if there was some helper or similar function to resolve such a common conflict. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1590\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1590\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1589","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1589\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1589\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1589\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1589","id":769187141,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQxMzcwMTM0","number":1589,"title":"Update doc2dial.py","user":{"login":"songfeng","id":2062185,"node_id":"MDQ6VXNlcjIwNjIxODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2062185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/songfeng","html_url":"https:\/\/github.com\/songfeng","followers_url":"https:\/\/api.github.com\/users\/songfeng\/followers","following_url":"https:\/\/api.github.com\/users\/songfeng\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/songfeng\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/songfeng\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/songfeng\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/songfeng\/orgs","repos_url":"https:\/\/api.github.com\/users\/songfeng\/repos","events_url":"https:\/\/api.github.com\/users\/songfeng\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/songfeng\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-16T18:50:56Z","updated_at":"2020-12-21T17:18:31Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1589","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1589","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1589.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1589.patch","merged_at":null},"body":"Added data loader for machine reading comprehension tasks proposed in the Doc2Dial EMNLP 2020 paper.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1589\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1589\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1588","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1588\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1588\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1588\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1588","id":769068227,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQxMjg3OTcz","number":1588,"title":"Modified hind 
encorp","user":{"login":"rahul-art","id":56379013,"node_id":"MDQ6VXNlcjU2Mzc5MDEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56379013?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rahul-art","html_url":"https:\/\/github.com\/rahul-art","followers_url":"https:\/\/api.github.com\/users\/rahul-art\/followers","following_url":"https:\/\/api.github.com\/users\/rahul-art\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rahul-art\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rahul-art\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rahul-art\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rahul-art\/orgs","repos_url":"https:\/\/api.github.com\/users\/rahul-art\/repos","events_url":"https:\/\/api.github.com\/users\/rahul-art\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rahul-art\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-16T16:28:14Z","updated_at":"2020-12-16T22:41:53Z","closed_at":"2020-12-16T17:20:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1588","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1588","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1588.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1588.patch","merged_at":"2020-12-16T17:20:28Z"},"body":"description added, unnecessary comments removed from .py and readme.md reformated \r\n@lhoestq for #1584","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1588\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1588\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1587","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1587\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1587\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1587\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1587","id":768929877,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQxMjAwMDk3","number":1587,"title":"Add nq_open question answering dataset 
","user":{"login":"Nilanshrajput","id":28673745,"node_id":"MDQ6VXNlcjI4NjczNzQ1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28673745?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Nilanshrajput","html_url":"https:\/\/github.com\/Nilanshrajput","followers_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/followers","following_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/orgs","repos_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/repos","events_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-16T14:22:08Z","updated_at":"2020-12-17T16:07:10Z","closed_at":"2020-12-17T16:07:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1587","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1587","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1587.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1587.patch","merged_at":"2020-12-17T16:07:10Z"},"body":"this is pr is a copy of #1506 due to messed up git history in that pr.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1587\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1587\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1586","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1586\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1586\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1586\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1586","id":768864502,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQxMTY0MDc2","number":1586,"title":"added irc disentangle 
dataset","user":{"login":"dhruvjoshi1998","id":32560035,"node_id":"MDQ6VXNlcjMyNTYwMDM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32560035?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dhruvjoshi1998","html_url":"https:\/\/github.com\/dhruvjoshi1998","followers_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/followers","following_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/orgs","repos_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/repos","events_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-16T13:25:58Z","updated_at":"2021-01-29T10:28:53Z","closed_at":"2021-01-29T10:28:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1586","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1586","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1586.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1586.patch","merged_at":"2021-01-29T10:28:53Z"},"body":"added irc disentanglement dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1586\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1586\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1585","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1585\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1585\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1585\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1585","id":768831171,"node_id":"MDU6SXNzdWU3Njg4MzExNzE=","number":1585,"title":"FileNotFoundError for 
`amazon_polarity`","user":{"login":"phtephanx","id":24647404,"node_id":"MDQ6VXNlcjI0NjQ3NDA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24647404?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/phtephanx","html_url":"https:\/\/github.com\/phtephanx","followers_url":"https:\/\/api.github.com\/users\/phtephanx\/followers","following_url":"https:\/\/api.github.com\/users\/phtephanx\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/phtephanx\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/phtephanx\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/phtephanx\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/phtephanx\/orgs","repos_url":"https:\/\/api.github.com\/users\/phtephanx\/repos","events_url":"https:\/\/api.github.com\/users\/phtephanx\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/phtephanx\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-16T12:51:05Z","updated_at":"2020-12-16T16:02:56Z","closed_at":"2020-12-16T16:02:56Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Version: `datasets==v1.1.3`\r\n\r\n### Reproduction\r\n```python\r\nfrom datasets import load_dataset\r\ndata = load_dataset(\"amazon_polarity\")\r\n```\r\ncrashes with\r\n```bash\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/amazon_polarity\/amazon_polarity.py\r\n```\r\nand \r\n```bash\r\nFileNotFoundError: Couldn't find file at https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/amazon_polarity\/amazon_polarity.py\r\n```\r\nand\r\n```bash\r\nFileNotFoundError: Couldn't find file locally at amazon_polarity\/amazon_polarity.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/amazon_polarity\/amazon_polarity.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/amazon_polarity\/amazon_polarity.py\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1585\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1585\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1584","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1584\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1584\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1584\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1584","id":768820406,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQxMTM2OTQ5","number":1584,"title":"Load hind 
encorp","user":{"login":"rahul-art","id":56379013,"node_id":"MDQ6VXNlcjU2Mzc5MDEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56379013?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rahul-art","html_url":"https:\/\/github.com\/rahul-art","followers_url":"https:\/\/api.github.com\/users\/rahul-art\/followers","following_url":"https:\/\/api.github.com\/users\/rahul-art\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rahul-art\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rahul-art\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rahul-art\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rahul-art\/orgs","repos_url":"https:\/\/api.github.com\/users\/rahul-art\/repos","events_url":"https:\/\/api.github.com\/users\/rahul-art\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rahul-art\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-16T12:38:38Z","updated_at":"2020-12-18T02:27:24Z","closed_at":"2020-12-18T02:27:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1584","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1584","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1584.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1584.patch","merged_at":null},"body":"reformated well documented, yaml tags added, code","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1584\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1584\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1583","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1583\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1583\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1583\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1583","id":768795986,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQxMTIyODEz","number":1583,"title":"Update metrics 
docstrings.","user":{"login":"Fraser-Greenlee","id":8402500,"node_id":"MDQ6VXNlcjg0MDI1MDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8402500?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Fraser-Greenlee","html_url":"https:\/\/github.com\/Fraser-Greenlee","followers_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/followers","following_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/orgs","repos_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/repos","events_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-16T12:14:18Z","updated_at":"2020-12-18T18:39:06Z","closed_at":"2020-12-18T18:39:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1583","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1583","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1583.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1583.patch","merged_at":"2020-12-18T18:39:06Z"},"body":"#1478 Correcting the argument descriptions for metrics.\r\n\r\nLet me know if there's any issues.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1583\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1583\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1582","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1582\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1582\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1582\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1582","id":768776617,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQxMTEwODU1","number":1582,"title":"Adding wiki lingua dataset as new 
branch","user":{"login":"katnoria","id":7674948,"node_id":"MDQ6VXNlcjc2NzQ5NDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7674948?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/katnoria","html_url":"https:\/\/github.com\/katnoria","followers_url":"https:\/\/api.github.com\/users\/katnoria\/followers","following_url":"https:\/\/api.github.com\/users\/katnoria\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/katnoria\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/katnoria\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/katnoria\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/katnoria\/orgs","repos_url":"https:\/\/api.github.com\/users\/katnoria\/repos","events_url":"https:\/\/api.github.com\/users\/katnoria\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/katnoria\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-16T11:53:07Z","updated_at":"2020-12-17T18:06:46Z","closed_at":"2020-12-17T18:06:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1582","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1582","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1582.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1582.patch","merged_at":"2020-12-17T18:06:45Z"},"body":"Adding the dataset as new branch as advised here: #1470\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1582\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1582\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1581","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1581\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1581\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1581\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1581","id":768320594,"node_id":"MDU6SXNzdWU3NjgzMjA1OTQ=","number":1581,"title":"Installing datasets and transformers in a tensorflow docker image throws Permission Error on 'import 
transformers'","user":{"login":"eduardofv","id":702586,"node_id":"MDQ6VXNlcjcwMjU4Ng==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/702586?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eduardofv","html_url":"https:\/\/github.com\/eduardofv","followers_url":"https:\/\/api.github.com\/users\/eduardofv\/followers","following_url":"https:\/\/api.github.com\/users\/eduardofv\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eduardofv\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eduardofv\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eduardofv\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eduardofv\/orgs","repos_url":"https:\/\/api.github.com\/users\/eduardofv\/repos","events_url":"https:\/\/api.github.com\/users\/eduardofv\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eduardofv\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-16T00:02:21Z","updated_at":"2021-06-17T15:40:45Z","closed_at":"2021-06-17T15:40:45Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am using a docker container, based on latest tensorflow-gpu image, to run transformers and datasets (4.0.1 and 1.1.3 respectively - Dockerfile attached below). Importing transformers throws a Permission Error to access `\/.cache`:\r\n\r\n```\r\n$ docker run --gpus=all --rm -it -u $(id -u):$(id -g) -v $(pwd)\/data:\/root\/data -v $(pwd):\/root -v $(pwd)\/models\/:\/root\/models -v $(pwd)\/saved_models\/:\/root\/saved_models -e \"HOST_HOSTNAME=$(hostname)\" hf-error:latest \/bin\/bash\r\n\r\n________ _______________ \r\n___ __\/__________________________________ ____\/__ \/________ __\r\n__ \/ _ _ \\_ __ \\_ ___\/ __ \\_ ___\/_ \/_ __ \/_ __ \\_ | \/| \/ \/\r\n_ \/ \/ __\/ \/ \/ \/(__ )\/ \/_\/ \/ \/ _ __\/ _ \/ \/ \/_\/ \/_ |\/ |\/ \/ \r\n\/_\/ \\___\/\/_\/ \/_\/\/____\/ \\____\/\/_\/ \/_\/ \/_\/ \\____\/____\/|__\/\r\n\r\n\r\nYou are running this container as user with ID 1000 and group 1000,\r\nwhich should map to the ID and group for your user on the Docker host. 
Great!\r\n\r\ntf-docker \/root > python\r\nPython 3.6.9 (default, Oct 8 2020, 12:12:24) \r\n[GCC 8.4.0] on linux\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n>>> import transformers\r\n2020-12-15 23:53:21.165827: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:49] Successfully opened dynamic library libcudart.so.11.0\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/transformers\/__init__.py\", line 22, in \r\n from .integrations import ( # isort:skip\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/transformers\/integrations.py\", line 5, in \r\n from .trainer_utils import EvaluationStrategy\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/transformers\/trainer_utils.py\", line 25, in \r\n from .file_utils import is_tf_available, is_torch_available, is_torch_tpu_available\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/transformers\/file_utils.py\", line 88, in \r\n import datasets # noqa: F401\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/__init__.py\", line 26, in \r\n from .arrow_dataset import Dataset, concatenate_datasets\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_dataset.py\", line 40, in \r\n from .arrow_reader import ArrowReader\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_reader.py\", line 31, in \r\n from .utils import cached_path, logging\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/__init__.py\", line 20, in \r\n from .download_manager import DownloadManager, GenerateMode\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/download_manager.py\", line 25, in \r\n from .file_utils import HF_DATASETS_CACHE, cached_path, get_from_cache, hash_url_to_filename\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py\", line 118, in \r\n os.makedirs(HF_MODULES_CACHE, exist_ok=True)\r\n File \"\/usr\/lib\/python3.6\/os.py\", line 210, in makedirs\r\n makedirs(head, mode, exist_ok)\r\n File \"\/usr\/lib\/python3.6\/os.py\", line 210, in makedirs\r\n makedirs(head, mode, exist_ok)\r\n File \"\/usr\/lib\/python3.6\/os.py\", line 220, in makedirs\r\n mkdir(name, mode)\r\nPermissionError: [Errno 13] Permission denied: '\/.cache'\r\n```\r\nI've pinned the problem to `RUN pip install datasets`, and by commenting it you can actually import transformers correctly. 
Another workaround I've found is creating the directory and giving permissions to it directly on the Dockerfile.\r\n\r\n```\r\nFROM tensorflow\/tensorflow:latest-gpu-jupyter\r\nWORKDIR \/root\r\n\r\nEXPOSE 80\r\nEXPOSE 8888\r\nEXPOSE 6006\r\n\r\nENV SHELL \/bin\/bash\r\nENV PATH=\"\/root\/.local\/bin:${PATH}\"\r\n\r\nENV CUDA_CACHE_PATH=\"\/root\/cache\/cuda\"\r\nENV CUDA_CACHE_MAXSIZE=\"4294967296\"\r\n\r\nENV TFHUB_CACHE_DIR=\"\/root\/cache\/tfhub\"\r\n\r\nRUN pip install --upgrade pip\r\n\r\nRUN apt update -y && apt upgrade -y\r\n\r\nRUN pip install transformers\r\n\r\n#Installing datasets will throw the error, try commenting and rebuilding\r\nRUN pip install datasets\r\n\r\n#Another workaround is creating the directory and give permissions explicitly\r\n#RUN mkdir \/.cache\r\n#RUN chmod 777 \/.cache\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1581\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1581\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1580","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1580\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1580\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1580\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1580","id":768111377,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQwNjQxNDQ3","number":1580,"title":"made suggested changes in diplomacy_detection.py","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-15T19:52:00Z","updated_at":"2020-12-16T10:27:52Z","closed_at":"2020-12-16T10:27:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1580","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1580","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1580.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1580.patch","merged_at":"2020-12-16T10:27:52Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issue
s\/1580\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1580\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1579","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1579\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1579\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1579\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1579","id":767808465,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQwMzk5OTY5","number":1579,"title":"Adding CLIMATE-FEVER dataset","user":{"login":"tdiggelm","id":1658969,"node_id":"MDQ6VXNlcjE2NTg5Njk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1658969?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tdiggelm","html_url":"https:\/\/github.com\/tdiggelm","followers_url":"https:\/\/api.github.com\/users\/tdiggelm\/followers","following_url":"https:\/\/api.github.com\/users\/tdiggelm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tdiggelm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tdiggelm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tdiggelm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tdiggelm\/orgs","repos_url":"https:\/\/api.github.com\/users\/tdiggelm\/repos","events_url":"https:\/\/api.github.com\/users\/tdiggelm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tdiggelm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-15T16:49:22Z","updated_at":"2020-12-22T13:43:16Z","closed_at":"2020-12-22T13:43:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1579","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1579","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1579.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1579.patch","merged_at":null},"body":"This PR request the addition of the CLIMATE-FEVER dataset:\r\nA dataset adopting the FEVER methodology that consists of 1,535 real-world claims regarding climate-change collected on the internet. Each claim is accompanied by five manually annotated evidence sentences retrieved from the English Wikipedia that support, refute or do not give enough information to validate the claim totalling in 7,675 claim-evidence pairs. 
The dataset features challenging claims that relate multiple facets and disputed cases of claims where both supporting and refuting evidence are present.\r\n\r\nMore information can be found at:\r\n- Homepage: \r\n- Paper: \r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1579\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1579\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1578","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1578\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1578\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1578\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1578","id":767760513,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQwMzY1NzYz","number":1578,"title":"update multiwozv22 checksums","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-15T16:13:52Z","updated_at":"2020-12-15T17:06:29Z","closed_at":"2020-12-15T17:06:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1578","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1578","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1578.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1578.patch","merged_at":"2020-12-15T17:06:29Z"},"body":"a file was updated on the GitHub repo for the dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1578\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1578\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1577","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1577\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1577\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1577\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1577","id":767342432,"node_id":"MDExOlB1bGxSZXF1ZXN0NTQwMDg2MzY5","number":1577,"title":"Add comet metric","user":{"login":"ricardorei","id":17256847,"node_id":"MDQ6VXNlcjE3MjU2ODQ3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17256847?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ricardorei","html_url":"https:\/\/github.com\/ricardorei","followers_url":"https:\/\/api.github.com\/users\/ricardorei\/followers","following_url":"https:\/\/api.github.com\/users\/ricardorei\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ricardorei\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ricardorei\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ricardorei\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ricardorei\/orgs","repos_url":"https:\/\/api.github.com\/users\/ricardorei\/repos","events_url":"https:\/\/api.github.com\/users\/ricardorei\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ricardorei\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-15T08:56:00Z","updated_at":"2021-01-14T13:33:10Z","closed_at":"2021-01-14T13:33:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1577","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1577","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1577.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1577.patch","merged_at":"2021-01-14T13:33:10Z"},"body":"Hey! I decided to add our new Crosslingual Optimized Metric for Evaluation of Translation (COMET) to the list of the available metrics.\r\n\r\nCOMET was [presented at EMNLP20](https:\/\/www.aclweb.org\/anthology\/2020.emnlp-main.213\/) and it is the highest performing metric, so far, on the WMT19 benchmark.\r\n\r\nWe also participated in the [WMT20 Metrics shared task ](http:\/\/www.statmt.org\/wmt20\/pdf\/2020.wmt-1.101.pdf) where once again COMET was validated as a top-performing metric. 
\r\n\r\n\r\nI hope that this metric will help researcher's and industry workers to better validate their MT systems in the future \ud83e\udd17 !\r\n\r\nCheers,\r\nRicardo\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1577\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1577\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1576","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1576\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1576\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1576\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1576","id":767080645,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5OTE3MTA0","number":1576,"title":"Remove the contributors section","user":{"login":"clmnt","id":821155,"node_id":"MDQ6VXNlcjgyMTE1NQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/821155?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/clmnt","html_url":"https:\/\/github.com\/clmnt","followers_url":"https:\/\/api.github.com\/users\/clmnt\/followers","following_url":"https:\/\/api.github.com\/users\/clmnt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/clmnt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/clmnt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/clmnt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/clmnt\/orgs","repos_url":"https:\/\/api.github.com\/users\/clmnt\/repos","events_url":"https:\/\/api.github.com\/users\/clmnt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/clmnt\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-15T01:47:15Z","updated_at":"2020-12-15T12:53:47Z","closed_at":"2020-12-15T12:53:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1576","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1576","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1576.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1576.patch","merged_at":"2020-12-15T12:53:46Z"},"body":"sourcerer is down","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1576\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1576\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1575","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1575\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1575\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1575\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1575","id":767076374,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5OTEzNzgx","number":1575,"title":"Hind_Encorp all done","user":{"login":"rahul-art","id":56379013,"node_id":"MDQ6VXNlcjU2Mzc5MDEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56379013?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rahul-art","html_url":"https:\/\/github.com\/rahul-art","followers_url":"https:\/\/api.github.com\/users\/rahul-art\/followers","following_url":"https:\/\/api.github.com\/users\/rahul-art\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rahul-art\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rahul-art\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rahul-art\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rahul-art\/orgs","repos_url":"https:\/\/api.github.com\/users\/rahul-art\/repos","events_url":"https:\/\/api.github.com\/users\/rahul-art\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rahul-art\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-12-15T01:36:02Z","updated_at":"2020-12-16T15:15:17Z","closed_at":"2020-12-16T15:15:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1575","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1575","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1575.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1575.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1575\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1575\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1574","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1574\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1574\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1574\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1574","id":767015317,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5ODY1Mzcy","number":1574,"title":"Diplomacy detection 
3","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-14T23:28:51Z","updated_at":"2020-12-14T23:29:32Z","closed_at":"2020-12-14T23:29:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1574","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1574","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1574.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1574.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1574\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1574\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1573","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1573\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1573\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1573\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1573","id":767011938,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5ODYyNjcx","number":1573,"title":"adding dataset for diplomacy 
detection-2","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-14T23:21:37Z","updated_at":"2020-12-14T23:36:57Z","closed_at":"2020-12-14T23:36:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1573","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1573","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1573.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1573.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1573\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1573\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1572","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1572\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1572\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1572\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1572","id":767008470,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5ODU5OTgx","number":1572,"title":"add Gnad10 dataset 
","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-14T23:15:02Z","updated_at":"2021-09-17T16:54:37Z","closed_at":"2020-12-16T16:52:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1572","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1572","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1572.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1572.patch","merged_at":"2020-12-16T16:52:30Z"},"body":"reference [PR#1317](https:\/\/github.com\/huggingface\/datasets\/pull\/1317)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1572\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1572\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1571","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1571\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1571\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1571\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1571","id":766981721,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5ODM5OTEw","number":1571,"title":"Fixing the KILT tasks to match our current 
standards","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-14T22:26:12Z","updated_at":"2020-12-14T23:07:41Z","closed_at":"2020-12-14T23:07:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1571","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1571","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1571.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1571.patch","merged_at":"2020-12-14T23:07:41Z"},"body":"This introduces a few changes to the Knowledge Intensive Learning task benchmark to bring it more in line with our current datasets, including adding the (minimal) dataset card and having one config per sub-task","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1571\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1571\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1570","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1570\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1570\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1570\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1570","id":766830545,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5NzM1MDY2","number":1570,"title":"Documentation for loading CSV datasets misleads the 
user","user":{"login":"onurgu","id":56893,"node_id":"MDQ6VXNlcjU2ODkz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/onurgu","html_url":"https:\/\/github.com\/onurgu","followers_url":"https:\/\/api.github.com\/users\/onurgu\/followers","following_url":"https:\/\/api.github.com\/users\/onurgu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/onurgu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/onurgu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/onurgu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/onurgu\/orgs","repos_url":"https:\/\/api.github.com\/users\/onurgu\/repos","events_url":"https:\/\/api.github.com\/users\/onurgu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/onurgu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-14T19:04:37Z","updated_at":"2020-12-22T19:30:12Z","closed_at":"2020-12-21T13:47:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1570","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1570","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1570.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1570.patch","merged_at":"2020-12-21T13:47:09Z"},"body":"Documentation for loading CSV datasets misleads the user into thinking setting `quote_char' to False will disable quoting.\r\n\r\nThere are two problems here:\r\n i) `quote_char' is misspelled, must be `quotechar'\r\n ii) the documentation should mention `quoting'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1570\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1570\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1569","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1569\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1569\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1569\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1569","id":766758895,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5NjkwMjc2","number":1569,"title":"added un_ga 
dataset","user":{"login":"param087","id":26374564,"node_id":"MDQ6VXNlcjI2Mzc0NTY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26374564?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/param087","html_url":"https:\/\/github.com\/param087","followers_url":"https:\/\/api.github.com\/users\/param087\/followers","following_url":"https:\/\/api.github.com\/users\/param087\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/param087\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/param087\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/param087\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/param087\/orgs","repos_url":"https:\/\/api.github.com\/users\/param087\/repos","events_url":"https:\/\/api.github.com\/users\/param087\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/param087\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-14T17:42:04Z","updated_at":"2020-12-15T15:28:58Z","closed_at":"2020-12-15T15:28:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1569","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1569","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1569.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1569.patch","merged_at":"2020-12-15T15:28:58Z"},"body":"Hi :hugs:, This is a PR for [United nations general assembly resolutions: A six-language parallel corpus](http:\/\/opus.nlpl.eu\/UN.php) dataset.\r\nWith suggested changes in #1330 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1569\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1569\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1568","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1568\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1568\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1568\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1568","id":766722994,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5NjY2ODg1","number":1568,"title":"Added the dataset 
clickbait_news_bg","user":{"login":"tsvm","id":1083319,"node_id":"MDQ6VXNlcjEwODMzMTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1083319?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tsvm","html_url":"https:\/\/github.com\/tsvm","followers_url":"https:\/\/api.github.com\/users\/tsvm\/followers","following_url":"https:\/\/api.github.com\/users\/tsvm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tsvm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tsvm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tsvm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tsvm\/orgs","repos_url":"https:\/\/api.github.com\/users\/tsvm\/repos","events_url":"https:\/\/api.github.com\/users\/tsvm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tsvm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-14T17:03:00Z","updated_at":"2020-12-15T18:28:56Z","closed_at":"2020-12-15T18:28:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1568","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1568","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1568.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1568.patch","merged_at":"2020-12-15T18:28:56Z"},"body":"There was a problem with my [previous PR 1445](https:\/\/github.com\/huggingface\/datasets\/pull\/1445) after rebasing, so I'm copying the dataset code into a new branch and submitting a new PR.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1568\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1568\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1567","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1567\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1567\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1567\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1567","id":766382609,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5NDE3NzI5","number":1567,"title":"[wording] Update 
Readme.md","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-14T12:34:52Z","updated_at":"2020-12-15T12:54:07Z","closed_at":"2020-12-15T12:54:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1567","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1567","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1567.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1567.patch","merged_at":"2020-12-15T12:54:06Z"},"body":"Make the features of the library clearer.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1567\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1567\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1566","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1566\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1566\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1566\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1566","id":766354236,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5Mzk5NTg4","number":1566,"title":"Add Microsoft Research Sequential Question Answering (SQA) 
Dataset","user":{"login":"mattbui","id":46804938,"node_id":"MDQ6VXNlcjQ2ODA0OTM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46804938?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mattbui","html_url":"https:\/\/github.com\/mattbui","followers_url":"https:\/\/api.github.com\/users\/mattbui\/followers","following_url":"https:\/\/api.github.com\/users\/mattbui\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mattbui\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mattbui\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mattbui\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mattbui\/orgs","repos_url":"https:\/\/api.github.com\/users\/mattbui\/repos","events_url":"https:\/\/api.github.com\/users\/mattbui\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mattbui\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-14T12:02:30Z","updated_at":"2020-12-15T15:24:22Z","closed_at":"2020-12-15T15:24:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1566","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1566","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1566.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1566.patch","merged_at":"2020-12-15T15:24:22Z"},"body":"For more information: https:\/\/msropendata.com\/datasets\/b25190ed-0f59-47b1-9211-5962858142c2","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1566\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1566\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1565","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1565\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1565\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1565\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1565","id":766333940,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5Mzg2MzEx","number":1565,"title":"Create 
README.md","user":{"login":"ManuelFay","id":43467008,"node_id":"MDQ6VXNlcjQzNDY3MDA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43467008?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ManuelFay","html_url":"https:\/\/github.com\/ManuelFay","followers_url":"https:\/\/api.github.com\/users\/ManuelFay\/followers","following_url":"https:\/\/api.github.com\/users\/ManuelFay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ManuelFay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ManuelFay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ManuelFay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ManuelFay\/orgs","repos_url":"https:\/\/api.github.com\/users\/ManuelFay\/repos","events_url":"https:\/\/api.github.com\/users\/ManuelFay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ManuelFay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-14T11:40:23Z","updated_at":"2021-03-25T14:01:49Z","closed_at":"2021-03-25T14:01:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1565","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1565","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1565.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1565.patch","merged_at":"2021-03-25T14:01:49Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1565\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1565\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1564","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1564\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1564\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1564\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1564","id":766266609,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MzQzMjAy","number":1564,"title":"added 
saudinewsnet","user":{"login":"abdulelahsm","id":28743265,"node_id":"MDQ6VXNlcjI4NzQzMjY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28743265?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abdulelahsm","html_url":"https:\/\/github.com\/abdulelahsm","followers_url":"https:\/\/api.github.com\/users\/abdulelahsm\/followers","following_url":"https:\/\/api.github.com\/users\/abdulelahsm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abdulelahsm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abdulelahsm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abdulelahsm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abdulelahsm\/orgs","repos_url":"https:\/\/api.github.com\/users\/abdulelahsm\/repos","events_url":"https:\/\/api.github.com\/users\/abdulelahsm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abdulelahsm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2020-12-14T10:35:09Z","updated_at":"2020-12-22T09:51:04Z","closed_at":"2020-12-22T09:51:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1564","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1564","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1564.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1564.patch","merged_at":"2020-12-22T09:51:04Z"},"body":"I'm having issues in creating the dummy data. I'm still investigating how to fix it. I'll close the PR if I couldn't find a solution","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1564\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1564\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1563","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1563\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1563\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1563\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1563","id":766211931,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MzA4Mzg4","number":1563,"title":"adding 
tmu-gfm-dataset","user":{"login":"forest1988","id":2755894,"node_id":"MDQ6VXNlcjI3NTU4OTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2755894?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/forest1988","html_url":"https:\/\/github.com\/forest1988","followers_url":"https:\/\/api.github.com\/users\/forest1988\/followers","following_url":"https:\/\/api.github.com\/users\/forest1988\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/forest1988\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/forest1988\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/forest1988\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/forest1988\/orgs","repos_url":"https:\/\/api.github.com\/users\/forest1988\/repos","events_url":"https:\/\/api.github.com\/users\/forest1988\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/forest1988\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-14T09:45:30Z","updated_at":"2020-12-21T10:21:04Z","closed_at":"2020-12-21T10:07:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1563","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1563","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1563.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1563.patch","merged_at":"2020-12-21T10:07:13Z"},"body":"Adding TMU-GFM-Dataset for Grammatical Error Correction.\r\n\r\nhttps:\/\/github.com\/tmu-nlp\/TMU-GFM-Dataset\r\n\r\nA dataset for GEC metrics with manual evaluations of grammaticality, fluency, and meaning preservation for system outputs.\r\nMore detail about the creation of the dataset can be found in [Yoshimura et al. 
(2020)](https:\/\/www.aclweb.org\/anthology\/2020.coling-main.573.pdf).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1563\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1563\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1562","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1562\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1562\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1562\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1562","id":765981749,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MTc5ODc3","number":1562,"title":"Add dataset COrpus of Urdu News TExt Reuse (COUNTER).","user":{"login":"arkhalid","id":14899066,"node_id":"MDQ6VXNlcjE0ODk5MDY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14899066?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arkhalid","html_url":"https:\/\/github.com\/arkhalid","followers_url":"https:\/\/api.github.com\/users\/arkhalid\/followers","following_url":"https:\/\/api.github.com\/users\/arkhalid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arkhalid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arkhalid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arkhalid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arkhalid\/orgs","repos_url":"https:\/\/api.github.com\/users\/arkhalid\/repos","events_url":"https:\/\/api.github.com\/users\/arkhalid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arkhalid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-14T06:32:48Z","updated_at":"2020-12-21T13:14:46Z","closed_at":"2020-12-21T13:14:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1562","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1562","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1562.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1562.patch","merged_at":"2020-12-21T13:14:46Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1562\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1562\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1561","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1561\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1561\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1561\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1561","id":765831436,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MTAwNjAy","number":1561,"title":"Lama","user":{"login":"ontocord","id":8900094,"node_id":"MDQ6VXNlcjg5MDAwOTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8900094?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ontocord","html_url":"https:\/\/github.com\/ontocord","followers_url":"https:\/\/api.github.com\/users\/ontocord\/followers","following_url":"https:\/\/api.github.com\/users\/ontocord\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ontocord\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ontocord\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ontocord\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ontocord\/orgs","repos_url":"https:\/\/api.github.com\/users\/ontocord\/repos","events_url":"https:\/\/api.github.com\/users\/ontocord\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ontocord\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-12-14T03:27:10Z","updated_at":"2020-12-28T09:51:47Z","closed_at":"2020-12-28T09:51:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1561","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1561","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1561.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1561.patch","merged_at":"2020-12-28T09:51:47Z"},"body":"This the LAMA dataset for probing facts and common sense from language models. 
\r\n\r\nSee https:\/\/github.com\/facebookresearch\/LAMA for more details.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1561\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1561\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1560","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1560\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1560\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1560\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1560","id":765814964,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDkzMzky","number":1560,"title":"Adding the BrWaC dataset","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-14T03:03:56Z","updated_at":"2020-12-18T15:56:56Z","closed_at":"2020-12-18T15:56:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1560","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1560","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1560.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1560.patch","merged_at":"2020-12-18T15:56:55Z"},"body":"Adding the BrWaC dataset, a large corpus of Portuguese language texts","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1560\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1560\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1559","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1559\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1559\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1559\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1559","id":765714183,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDQ5MTky","number":1559,"title":"adding dataset card information to CONTRIBUTING.md","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-14T00:08:43Z","updated_at":"2020-12-14T17:55:03Z","closed_at":"2020-12-14T17:55:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1559","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1559","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1559.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1559.patch","merged_at":"2020-12-14T17:55:03Z"},"body":"Added a documentation line and link to the full sprint guide in the \"How to add a dataset\" section, and a section on how to contribute to the dataset card of an existing dataset.\r\n\r\nAnd a thank you note at the end :hugs: ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1559\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1559\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1558","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1558\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1558\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1558\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1558","id":765707907,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDQ2MzA4","number":1558,"title":"Adding Igbo NER data 
","user":{"login":"purvimisal","id":22298787,"node_id":"MDQ6VXNlcjIyMjk4Nzg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22298787?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/purvimisal","html_url":"https:\/\/github.com\/purvimisal","followers_url":"https:\/\/api.github.com\/users\/purvimisal\/followers","following_url":"https:\/\/api.github.com\/users\/purvimisal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/purvimisal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/purvimisal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/purvimisal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/purvimisal\/orgs","repos_url":"https:\/\/api.github.com\/users\/purvimisal\/repos","events_url":"https:\/\/api.github.com\/users\/purvimisal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/purvimisal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-13T23:52:11Z","updated_at":"2020-12-21T14:38:20Z","closed_at":"2020-12-21T14:38:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1558","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1558","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1558.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1558.patch","merged_at":"2020-12-21T14:38:20Z"},"body":"This PR adds the Igbo NER dataset.\r\nData: https:\/\/github.com\/IgnatiusEzeani\/IGBONLP\/tree\/master\/ig_ner ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1558\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1558\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1557","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1557\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1557\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1557\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1557","id":765693927,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDM5MDY0","number":1557,"title":"HindEncorp again 
commited","user":{"login":"rahul-art","id":56379013,"node_id":"MDQ6VXNlcjU2Mzc5MDEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56379013?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rahul-art","html_url":"https:\/\/github.com\/rahul-art","followers_url":"https:\/\/api.github.com\/users\/rahul-art\/followers","following_url":"https:\/\/api.github.com\/users\/rahul-art\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rahul-art\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rahul-art\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rahul-art\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rahul-art\/orgs","repos_url":"https:\/\/api.github.com\/users\/rahul-art\/repos","events_url":"https:\/\/api.github.com\/users\/rahul-art\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rahul-art\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-12-13T23:09:02Z","updated_at":"2020-12-15T10:37:05Z","closed_at":"2020-12-15T10:37:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1557","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1557","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1557.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1557.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1557\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1557\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1556","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1556\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1556\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1556\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1556","id":765689730,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDM2OTYz","number":1556,"title":"add 
bswac","user":{"login":"IvanZidov","id":11391118,"node_id":"MDQ6VXNlcjExMzkxMTE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11391118?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/IvanZidov","html_url":"https:\/\/github.com\/IvanZidov","followers_url":"https:\/\/api.github.com\/users\/IvanZidov\/followers","following_url":"https:\/\/api.github.com\/users\/IvanZidov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/IvanZidov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/IvanZidov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/IvanZidov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/IvanZidov\/orgs","repos_url":"https:\/\/api.github.com\/users\/IvanZidov\/repos","events_url":"https:\/\/api.github.com\/users\/IvanZidov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/IvanZidov\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-13T22:55:35Z","updated_at":"2020-12-18T15:14:28Z","closed_at":"2020-12-18T15:14:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1556","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1556","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1556.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1556.patch","merged_at":"2020-12-18T15:14:27Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1556\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1556\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1555","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1555\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1555\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1555\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1555","id":765681607,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDMzMzIw","number":1555,"title":"Added Opus 
TedTalks","user":{"login":"rkc007","id":22396042,"node_id":"MDQ6VXNlcjIyMzk2MDQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22396042?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rkc007","html_url":"https:\/\/github.com\/rkc007","followers_url":"https:\/\/api.github.com\/users\/rkc007\/followers","following_url":"https:\/\/api.github.com\/users\/rkc007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rkc007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rkc007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rkc007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rkc007\/orgs","repos_url":"https:\/\/api.github.com\/users\/rkc007\/repos","events_url":"https:\/\/api.github.com\/users\/rkc007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rkc007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-13T22:29:33Z","updated_at":"2020-12-18T09:44:43Z","closed_at":"2020-12-18T09:44:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1555","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1555","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1555.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1555.patch","merged_at":"2020-12-18T09:44:43Z"},"body":"Dataset : http:\/\/opus.nlpl.eu\/TedTalks.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1555\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1555\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1554","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1554\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1554\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1554\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1554","id":765675148,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDMwNDU2","number":1554,"title":"Opus CAPES 
added","user":{"login":"rkc007","id":22396042,"node_id":"MDQ6VXNlcjIyMzk2MDQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22396042?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rkc007","html_url":"https:\/\/github.com\/rkc007","followers_url":"https:\/\/api.github.com\/users\/rkc007\/followers","following_url":"https:\/\/api.github.com\/users\/rkc007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rkc007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rkc007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rkc007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rkc007\/orgs","repos_url":"https:\/\/api.github.com\/users\/rkc007\/repos","events_url":"https:\/\/api.github.com\/users\/rkc007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rkc007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-13T22:11:34Z","updated_at":"2020-12-18T09:54:57Z","closed_at":"2020-12-18T08:46:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1554","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1554","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1554.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1554.patch","merged_at":null},"body":"Dataset : http:\/\/opus.nlpl.eu\/CAPES.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1554\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1554\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1553","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1553\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1553\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1553\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1553","id":765670083,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDI4MzM3","number":1553,"title":"added 
air_dialogue","user":{"login":"skyprince999","id":9033954,"node_id":"MDQ6VXNlcjkwMzM5NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9033954?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/skyprince999","html_url":"https:\/\/github.com\/skyprince999","followers_url":"https:\/\/api.github.com\/users\/skyprince999\/followers","following_url":"https:\/\/api.github.com\/users\/skyprince999\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/skyprince999\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/skyprince999\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/skyprince999\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/skyprince999\/orgs","repos_url":"https:\/\/api.github.com\/users\/skyprince999\/repos","events_url":"https:\/\/api.github.com\/users\/skyprince999\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/skyprince999\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-13T21:59:02Z","updated_at":"2020-12-23T11:20:40Z","closed_at":"2020-12-23T11:20:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1553","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1553","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1553.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1553.patch","merged_at":"2020-12-23T11:20:39Z"},"body":"UPDATE2 (3797ce5): Updated for multi-configs \r\n\r\nUPDATE (7018082): manually created the dummy_datasets. All tests were cleared locally. Pushed it to origin\/master\r\n\r\nDRAFT VERSION (57fdb20): (_no longer draft_)\r\nUploaded the air_dialogue database. \r\ndummy_data creation was failing in local, since the original downloaded file has some nested folders. Pushing it since the tests with real data was cleared. 
Will re-check & update via manually creating some dummy_data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1553\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1553\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1552","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1552\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1552\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1552\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1552","id":765664411,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDI2MzAx","number":1552,"title":"Added OPUS ParaCrawl","user":{"login":"rkc007","id":22396042,"node_id":"MDQ6VXNlcjIyMzk2MDQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22396042?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rkc007","html_url":"https:\/\/github.com\/rkc007","followers_url":"https:\/\/api.github.com\/users\/rkc007\/followers","following_url":"https:\/\/api.github.com\/users\/rkc007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rkc007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rkc007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rkc007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rkc007\/orgs","repos_url":"https:\/\/api.github.com\/users\/rkc007\/repos","events_url":"https:\/\/api.github.com\/users\/rkc007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rkc007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-12-13T21:44:29Z","updated_at":"2020-12-21T09:50:26Z","closed_at":"2020-12-21T09:50:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1552","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1552","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1552.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1552.patch","merged_at":"2020-12-21T09:50:25Z"},"body":"Dataset : http:\/\/opus.nlpl.eu\/ParaCrawl.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1552\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1552\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1551","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1551\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1551\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1551\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1551","id":765621879,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDEwNDAy","number":1551,"title":"Monero","user":{"login":"iliemihai","id":2815308,"node_id":"MDQ6VXNlcjI4MTUzMDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2815308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iliemihai","html_url":"https:\/\/github.com\/iliemihai","followers_url":"https:\/\/api.github.com\/users\/iliemihai\/followers","following_url":"https:\/\/api.github.com\/users\/iliemihai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iliemihai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iliemihai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iliemihai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iliemihai\/orgs","repos_url":"https:\/\/api.github.com\/users\/iliemihai\/repos","events_url":"https:\/\/api.github.com\/users\/iliemihai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iliemihai\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-13T19:56:48Z","updated_at":"2020-12-18T14:42:13Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1551","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1551","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1551.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1551.patch","merged_at":null},"body":"Biomedical Romanian dataset :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1551\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1551\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1550","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1550\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1550\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1550\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1550","id":765620925,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDEwMDY1","number":1550,"title":"Add offensive langauge dravidian 
dataset","user":{"login":"jamespaultg","id":7421838,"node_id":"MDQ6VXNlcjc0MjE4Mzg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7421838?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jamespaultg","html_url":"https:\/\/github.com\/jamespaultg","followers_url":"https:\/\/api.github.com\/users\/jamespaultg\/followers","following_url":"https:\/\/api.github.com\/users\/jamespaultg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jamespaultg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jamespaultg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jamespaultg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jamespaultg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jamespaultg\/repos","events_url":"https:\/\/api.github.com\/users\/jamespaultg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jamespaultg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-13T19:54:19Z","updated_at":"2020-12-18T15:52:49Z","closed_at":"2020-12-18T14:25:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1550","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1550","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1550.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1550.patch","merged_at":"2020-12-18T14:25:30Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1550\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1550\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1549","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1549\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1549\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1549\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1549","id":765612905,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDA3MTU4","number":1549,"title":"Generics kb new 
branch","user":{"login":"bpatidar","id":12439573,"node_id":"MDQ6VXNlcjEyNDM5NTcz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12439573?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bpatidar","html_url":"https:\/\/github.com\/bpatidar","followers_url":"https:\/\/api.github.com\/users\/bpatidar\/followers","following_url":"https:\/\/api.github.com\/users\/bpatidar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bpatidar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bpatidar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bpatidar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bpatidar\/orgs","repos_url":"https:\/\/api.github.com\/users\/bpatidar\/repos","events_url":"https:\/\/api.github.com\/users\/bpatidar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bpatidar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-13T19:33:10Z","updated_at":"2020-12-21T13:55:09Z","closed_at":"2020-12-21T13:55:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1549","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1549","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1549.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1549.patch","merged_at":"2020-12-21T13:55:09Z"},"body":"Datasets need manual downloads. Have thus created dummy data as well. But pytest on real and dummy data are failing.\r\nI have completed the readme , tags and other required things. 
I need to create the metadata json once tests get successful.\r\nOpening a PR while working with Yacine Jernite to resolve my pytest issues.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1549\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1549\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1548","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1548\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1548\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1548\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1548","id":765592336,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM5MDAwMjIy","number":1548,"title":"Fix `\ud83e\udd17Datasets` - `tfds` differences link + a few aesthetics","user":{"login":"VIVelev","id":22171622,"node_id":"MDQ6VXNlcjIyMTcxNjIy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22171622?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VIVelev","html_url":"https:\/\/github.com\/VIVelev","followers_url":"https:\/\/api.github.com\/users\/VIVelev\/followers","following_url":"https:\/\/api.github.com\/users\/VIVelev\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VIVelev\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VIVelev\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VIVelev\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VIVelev\/orgs","repos_url":"https:\/\/api.github.com\/users\/VIVelev\/repos","events_url":"https:\/\/api.github.com\/users\/VIVelev\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VIVelev\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-13T18:48:21Z","updated_at":"2020-12-15T12:55:27Z","closed_at":"2020-12-15T12:55:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1548","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1548","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1548.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1548.patch","merged_at":"2020-12-15T12:55:27Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1548\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1548\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1547","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1547\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1547\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1547\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1547","id":765562792,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4OTkwOTMy","number":1547,"title":"Adding PolEval2019 Machine Translation Task dataset","user":{"login":"vrindaprabhu","id":16264631,"node_id":"MDQ6VXNlcjE2MjY0NjMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16264631?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vrindaprabhu","html_url":"https:\/\/github.com\/vrindaprabhu","followers_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/followers","following_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/orgs","repos_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/repos","events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-13T17:50:03Z","updated_at":"2021-02-16T05:27:56Z","closed_at":"2020-12-21T16:13:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1547","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1547","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1547.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1547.patch","merged_at":"2020-12-21T16:13:21Z"},"body":"Facing an error with pytest in training. 
Dummy data is passing.\r\nREADME has to be updated.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1547\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1547\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1546","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1546\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1546\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1546\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1546","id":765559923,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4OTkwMjgw","number":1546,"title":"Add persian ner dataset","user":{"login":"KMFODA","id":35491698,"node_id":"MDQ6VXNlcjM1NDkxNjk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35491698?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KMFODA","html_url":"https:\/\/github.com\/KMFODA","followers_url":"https:\/\/api.github.com\/users\/KMFODA\/followers","following_url":"https:\/\/api.github.com\/users\/KMFODA\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KMFODA\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KMFODA\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KMFODA\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KMFODA\/orgs","repos_url":"https:\/\/api.github.com\/users\/KMFODA\/repos","events_url":"https:\/\/api.github.com\/users\/KMFODA\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KMFODA\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-13T17:45:48Z","updated_at":"2020-12-23T09:53:03Z","closed_at":"2020-12-23T09:53:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1546","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1546","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1546.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1546.patch","merged_at":"2020-12-23T09:53:03Z"},"body":"Adding the following dataset:\r\n\r\nhttps:\/\/github.com\/HaniehP\/PersianNER\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1546\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1546\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1545","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1545\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1545\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1545\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1545","id":765550283,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4OTg3OTY0","number":1545,"title":"add hrwac","user":{"login":"IvanZidov","id":11391118,"node_id":"MDQ6VXNlcjExMzkxMTE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11391118?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/IvanZidov","html_url":"https:\/\/github.com\/IvanZidov","followers_url":"https:\/\/api.github.com\/users\/IvanZidov\/followers","following_url":"https:\/\/api.github.com\/users\/IvanZidov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/IvanZidov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/IvanZidov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/IvanZidov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/IvanZidov\/orgs","repos_url":"https:\/\/api.github.com\/users\/IvanZidov\/repos","events_url":"https:\/\/api.github.com\/users\/IvanZidov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/IvanZidov\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-13T17:31:54Z","updated_at":"2020-12-18T13:35:17Z","closed_at":"2020-12-18T13:35:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1545","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1545","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1545.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1545.patch","merged_at":"2020-12-18T13:35:17Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1545\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1545\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1544","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1544\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1544\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1544\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1544","id":765514828,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4OTc5MjIz","number":1544,"title":"Added Wiki Summary 
Dataset","user":{"login":"tanmoyio","id":33005287,"node_id":"MDQ6VXNlcjMzMDA1Mjg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33005287?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tanmoyio","html_url":"https:\/\/github.com\/tanmoyio","followers_url":"https:\/\/api.github.com\/users\/tanmoyio\/followers","following_url":"https:\/\/api.github.com\/users\/tanmoyio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tanmoyio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tanmoyio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tanmoyio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tanmoyio\/orgs","repos_url":"https:\/\/api.github.com\/users\/tanmoyio\/repos","events_url":"https:\/\/api.github.com\/users\/tanmoyio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tanmoyio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":18,"created_at":"2020-12-13T16:33:46Z","updated_at":"2020-12-18T16:20:06Z","closed_at":"2020-12-18T16:17:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1544","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1544","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1544.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1544.patch","merged_at":"2020-12-18T16:17:18Z"},"body":"Wiki Summary: Dataset extracted from Persian Wikipedia into the form of articles and highlights.\r\nLink: https:\/\/github.com\/m3hrdadfi\/wiki-summary","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1544\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1544\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1543","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1543\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1543\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1543\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1543","id":765476196,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4OTcwOTU5","number":1543,"title":"adding 
HindEncorp","user":{"login":"rahul-art","id":56379013,"node_id":"MDQ6VXNlcjU2Mzc5MDEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56379013?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rahul-art","html_url":"https:\/\/github.com\/rahul-art","followers_url":"https:\/\/api.github.com\/users\/rahul-art\/followers","following_url":"https:\/\/api.github.com\/users\/rahul-art\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rahul-art\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rahul-art\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rahul-art\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rahul-art\/orgs","repos_url":"https:\/\/api.github.com\/users\/rahul-art\/repos","events_url":"https:\/\/api.github.com\/users\/rahul-art\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rahul-art\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-13T15:39:07Z","updated_at":"2020-12-13T23:35:53Z","closed_at":"2020-12-13T23:35:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1543","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1543","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1543.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1543.patch","merged_at":null},"body":"adding Hindi Wikipedia corpus","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1543\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1543\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1542","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1542\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1542\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1542\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1542","id":765439746,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4OTYyMjAx","number":1542,"title":"fix typo 
readme","user":{"login":"clmnt","id":821155,"node_id":"MDQ6VXNlcjgyMTE1NQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/821155?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/clmnt","html_url":"https:\/\/github.com\/clmnt","followers_url":"https:\/\/api.github.com\/users\/clmnt\/followers","following_url":"https:\/\/api.github.com\/users\/clmnt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/clmnt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/clmnt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/clmnt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/clmnt\/orgs","repos_url":"https:\/\/api.github.com\/users\/clmnt\/repos","events_url":"https:\/\/api.github.com\/users\/clmnt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/clmnt\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-13T14:41:22Z","updated_at":"2020-12-13T17:16:41Z","closed_at":"2020-12-13T17:16:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1542","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1542","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1542.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1542.patch","merged_at":"2020-12-13T17:16:40Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1542\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1542\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1541","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1541\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1541\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1541\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1541","id":765430586,"node_id":"MDU6SXNzdWU3NjU0MzA1ODY=","number":1541,"title":"connection issue while downloading 
data","user":{"login":"rabeehkarimimahabadi","id":73364383,"node_id":"MDQ6VXNlcjczMzY0Mzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73364383?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi","html_url":"https:\/\/github.com\/rabeehkarimimahabadi","followers_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-13T14:27:00Z","updated_at":"2020-12-15T10:34:31Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am running my codes on google cloud, and I am getting this error resulting in the failure of the codes when trying to download the data, could you assist me to solve this? also as a temporary solution, could you tell me how I can increase the number of retries and timeout to at least let the models run for now. thanks \r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"finetune_t5_trainer.py\", line 361, in \r\n main()\r\n File \"finetune_t5_trainer.py\", line 269, in main\r\n add_prefix=False if training_args.train_adapters else True)\r\n File \"\/workdir\/seq2seq\/data\/tasks.py\", line 70, in get_dataset\r\n dataset = self.load_dataset(split=split)\r\n File \"\/workdir\/seq2seq\/data\/tasks.py\", line 306, in load_dataset\r\n return datasets.load_dataset('glue', 'cola', split=split)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py\", line 589, in load_dataset\r\n path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py\", line 263, in prepare_module\r\n head_hf_s3(path, filename=name, dataset=dataset)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py\", line 200, in head_hf_s3\r\n return http_head(hf_bucket_url(identifier=identifier, filename=filename, use_cdn=use_cdn, dataset=dataset))\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py\", line 403, in http_head\r\n url, proxies=proxies, headers=headers, cookies=cookies, allow_redirects=allow_redirects, timeout=timeout\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/requests\/api.py\", line 104, in head\r\n return request('head', url, **kwargs)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/requests\/api.py\", line 61, in request\r\n return session.request(method=method, url=url, **kwargs)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/requests\/sessions.py\", line 542, in request\r\n resp = self.send(prep, **send_kwargs)\r\n File 
\"\/usr\/local\/lib\/python3.6\/dist-packages\/requests\/sessions.py\", line 655, in send\r\n r = adapter.send(request, **kwargs)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/requests\/adapters.py\", line 504, in send\r\n raise ConnectTimeout(e, request=request)\r\nrequests.exceptions.ConnectTimeout: HTTPSConnectionPool(host='s3.amazonaws.com', port=443): Max retries exceeded with url: \/datasets.huggingface.co\/datasets\/datasets\/glue\/glue.py (Caused by ConnectTimeoutError(, 'Connection to s3.amazonaws.com timed out. (connect timeout=10)'))\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1541\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1541\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1540","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1540\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1540\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1540\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1540","id":765357702,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4OTQ1NDc2","number":1540,"title":"added TTC4900: A Benchmark Data for Turkish Text Categorization dataset","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-12-13T12:43:33Z","updated_at":"2020-12-18T10:09:01Z","closed_at":"2020-12-18T10:09:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1540","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1540","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1540.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1540.patch","merged_at":"2020-12-18T10:09:01Z"},"body":"This PR adds the TTC4900 dataset which is a Turkish Text Categorization dataset by me and @basakbuluz. 
\r\n\r\nHomepage: [https:\/\/www.kaggle.com\/savasy\/ttc4900](https:\/\/www.kaggle.com\/savasy\/ttc4900)\r\nPoint of Contact: [Sava\u015f Y\u0131ld\u0131r\u0131m](mailto:savasy@gmail.com) \/ @savasy\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1540\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1540\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1539","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1539\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1539\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1539\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1539","id":765338910,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4OTQyMTU4","number":1539,"title":"Added Wiki Asp dataset","user":{"login":"katnoria","id":7674948,"node_id":"MDQ6VXNlcjc2NzQ5NDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7674948?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/katnoria","html_url":"https:\/\/github.com\/katnoria","followers_url":"https:\/\/api.github.com\/users\/katnoria\/followers","following_url":"https:\/\/api.github.com\/users\/katnoria\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/katnoria\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/katnoria\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/katnoria\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/katnoria\/orgs","repos_url":"https:\/\/api.github.com\/users\/katnoria\/repos","events_url":"https:\/\/api.github.com\/users\/katnoria\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/katnoria\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-13T12:18:34Z","updated_at":"2020-12-22T10:16:01Z","closed_at":"2020-12-22T10:16:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1539","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1539","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1539.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1539.patch","merged_at":null},"body":"Hello,\r\n\r\nI have added Wiki Asp dataset. 
Please review the PR.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1539\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1539\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1538","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1538\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1538\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1538\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1538","id":765139739,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4ODkxOTE3","number":1538,"title":"tweets_hate_speech_detection","user":{"login":"darshan-gandhi","id":44197177,"node_id":"MDQ6VXNlcjQ0MTk3MTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44197177?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/darshan-gandhi","html_url":"https:\/\/github.com\/darshan-gandhi","followers_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/followers","following_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/orgs","repos_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/repos","events_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-13T07:37:53Z","updated_at":"2020-12-21T15:54:28Z","closed_at":"2020-12-21T15:54:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1538","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1538","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1538.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1538.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1538\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1538\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1537","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1537\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1537\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1537\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1537","id":765095210,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4ODY1NzIz","number":1537,"title":"added ohsumed ","user":{"login":"skyprince999","id":9033954,"node_id":"MDQ6VXNlcjkwMzM5NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9033954?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/skyprince999","html_url":"https:\/\/github.com\/skyprince999","followers_url":"https:\/\/api.github.com\/users\/skyprince999\/followers","following_url":"https:\/\/api.github.com\/users\/skyprince999\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/skyprince999\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/skyprince999\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/skyprince999\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/skyprince999\/orgs","repos_url":"https:\/\/api.github.com\/users\/skyprince999\/repos","events_url":"https:\/\/api.github.com\/users\/skyprince999\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/skyprince999\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-13T06:58:23Z","updated_at":"2020-12-17T18:28:16Z","closed_at":"2020-12-17T18:28:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1537","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1537","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1537.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1537.patch","merged_at":"2020-12-17T18:28:16Z"},"body":"UPDATE2: PR passed all tests. Now waiting for review.\r\n\r\nUPDATE: pushed a new version. cross fingers that it should complete all the tests! :) \r\n If it passes all tests then it's not a draft version. 
\r\n\r\nThis is a draft version ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1537\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1537\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1536","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1536\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1536\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1536\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1536","id":765043121,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4ODM2MDM3","number":1536,"title":"Add Hippocorpus Dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-13T06:13:02Z","updated_at":"2020-12-15T13:41:17Z","closed_at":"2020-12-15T13:40:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1536","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1536","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1536.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1536.patch","merged_at":"2020-12-15T13:40:11Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1536\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1536\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1535","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1535\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1535\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1535\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1535","id":764977542,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4ODAwMDUw","number":1535,"title":"Adding Igbo monolingual 
dataset","user":{"login":"purvimisal","id":22298787,"node_id":"MDQ6VXNlcjIyMjk4Nzg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22298787?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/purvimisal","html_url":"https:\/\/github.com\/purvimisal","followers_url":"https:\/\/api.github.com\/users\/purvimisal\/followers","following_url":"https:\/\/api.github.com\/users\/purvimisal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/purvimisal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/purvimisal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/purvimisal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/purvimisal\/orgs","repos_url":"https:\/\/api.github.com\/users\/purvimisal\/repos","events_url":"https:\/\/api.github.com\/users\/purvimisal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/purvimisal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-13T05:16:37Z","updated_at":"2020-12-21T14:39:49Z","closed_at":"2020-12-21T14:39:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1535","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1535","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1535.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1535.patch","merged_at":"2020-12-21T14:39:48Z"},"body":"This PR adds the Igbo Monolingual dataset.\r\nData: https:\/\/github.com\/IgnatiusEzeani\/IGBONLP\/tree\/master\/ig_monoling\r\nPaper: https:\/\/arxiv.org\/abs\/2004.00648 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1535\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1535\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1534","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1534\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1534\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1534\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1534","id":764934681,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4Nzc1Njky","number":1534,"title":"adding dataset for diplomacy 
detection","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-13T04:38:43Z","updated_at":"2020-12-15T19:52:52Z","closed_at":"2020-12-15T19:52:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1534","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1534","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1534.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1534.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1534\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1534\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1533","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1533\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1533\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1533\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1533","id":764835913,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NzE4MDAz","number":1533,"title":"add id_panl_bppt, a parallel corpus for 
en-id","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-13T03:11:27Z","updated_at":"2020-12-21T10:40:36Z","closed_at":"2020-12-21T10:40:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1533","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1533","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1533.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1533.patch","merged_at":"2020-12-21T10:40:36Z"},"body":"Parallel Text Corpora for English - Indonesian","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1533\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1533\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1532","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1532\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1532\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1532\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1532","id":764772184,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NjgxODcz","number":1532,"title":"adding 
hate-speech-and-offensive-language","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-13T02:16:31Z","updated_at":"2020-12-17T18:36:54Z","closed_at":"2020-12-17T18:10:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1532","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1532","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1532.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1532.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1532\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1532\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1531","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1531\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1531\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1531\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1531","id":764752882,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NjcwNzcz","number":1531,"title":"adding 
hate-speech-and-offensive-language","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-13T01:59:07Z","updated_at":"2020-12-13T02:17:02Z","closed_at":"2020-12-13T02:17:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1531","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1531","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1531.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1531.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1531\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1531\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1530","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1530\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1530\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1530\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1530","id":764749507,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NjY4ODI3","number":1530,"title":"add indonlu benchmark 
datasets","user":{"login":"yasirabd","id":6518504,"node_id":"MDQ6VXNlcjY1MTg1MDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6518504?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yasirabd","html_url":"https:\/\/github.com\/yasirabd","followers_url":"https:\/\/api.github.com\/users\/yasirabd\/followers","following_url":"https:\/\/api.github.com\/users\/yasirabd\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yasirabd\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yasirabd\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yasirabd\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yasirabd\/orgs","repos_url":"https:\/\/api.github.com\/users\/yasirabd\/repos","events_url":"https:\/\/api.github.com\/users\/yasirabd\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yasirabd\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-13T01:56:09Z","updated_at":"2020-12-16T11:11:43Z","closed_at":"2020-12-16T11:11:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1530","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1530","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1530.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1530.patch","merged_at":"2020-12-16T11:11:43Z"},"body":"The IndoNLU benchmark is a collection of resources for training, evaluating, and analyzing natural language understanding systems for the Indonesian language. 
There are 12 datasets in IndoNLU.\r\n\r\nThis is a new clean PR from [#1322](https:\/\/github.com\/huggingface\/datasets\/pull\/1322)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1530\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1530\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1529","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1529\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1529\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1529\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1529","id":764748410,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NjY4MjU4","number":1529,"title":"Ro sent","user":{"login":"iliemihai","id":2815308,"node_id":"MDQ6VXNlcjI4MTUzMDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2815308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iliemihai","html_url":"https:\/\/github.com\/iliemihai","followers_url":"https:\/\/api.github.com\/users\/iliemihai\/followers","following_url":"https:\/\/api.github.com\/users\/iliemihai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iliemihai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iliemihai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iliemihai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iliemihai\/orgs","repos_url":"https:\/\/api.github.com\/users\/iliemihai\/repos","events_url":"https:\/\/api.github.com\/users\/iliemihai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iliemihai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-12-13T01:55:02Z","updated_at":"2021-03-19T10:32:43Z","closed_at":"2021-03-19T10:32:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1529","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1529","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1529.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1529.patch","merged_at":null},"body":"Movies reviews dataset for Romanian language.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1529\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1529\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1528","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1528\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1528\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1528\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1528","id":764724035,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NjU0ODU0","number":1528,"title":"initial commit for Common Crawl Domain Names","user":{"login":"Karthik-Bhaskar","id":13200370,"node_id":"MDQ6VXNlcjEzMjAwMzcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13200370?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar","html_url":"https:\/\/github.com\/Karthik-Bhaskar","followers_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/followers","following_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/orgs","repos_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/repos","events_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-13T01:32:49Z","updated_at":"2020-12-18T13:51:38Z","closed_at":"2020-12-18T10:22:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1528","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1528","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1528.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1528.patch","merged_at":"2020-12-18T10:22:32Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1528\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1528\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1527","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1527\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1527\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1527\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1527","id":764638504,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NjA3MjQw","number":1527,"title":"Add : Conv AI 2 (Messed up original 
PR)","user":{"login":"rkc007","id":22396042,"node_id":"MDQ6VXNlcjIyMzk2MDQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22396042?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rkc007","html_url":"https:\/\/github.com\/rkc007","followers_url":"https:\/\/api.github.com\/users\/rkc007\/followers","following_url":"https:\/\/api.github.com\/users\/rkc007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rkc007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rkc007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rkc007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rkc007\/orgs","repos_url":"https:\/\/api.github.com\/users\/rkc007\/repos","events_url":"https:\/\/api.github.com\/users\/rkc007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rkc007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-13T00:21:14Z","updated_at":"2020-12-13T19:14:24Z","closed_at":"2020-12-13T19:14:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1527","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1527","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1527.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1527.patch","merged_at":"2020-12-13T19:14:24Z"},"body":"@lhoestq Sorry I messed up the previous 2 PR's -> https:\/\/github.com\/huggingface\/datasets\/pull\/1462 -> https:\/\/github.com\/huggingface\/datasets\/pull\/1383. So created a new one. Also, everything is fixed in this PR. Can you please review it ?\r\nThanks in advance. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1527\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1527\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1526","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1526\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1526\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1526\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1526","id":764591243,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NTgxNDg4","number":1526,"title":"added Hebrew thisworld corpus","user":{"login":"imvladikon","id":10088963,"node_id":"MDQ6VXNlcjEwMDg4OTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10088963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/imvladikon","html_url":"https:\/\/github.com\/imvladikon","followers_url":"https:\/\/api.github.com\/users\/imvladikon\/followers","following_url":"https:\/\/api.github.com\/users\/imvladikon\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/imvladikon\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/imvladikon\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/imvladikon\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/imvladikon\/orgs","repos_url":"https:\/\/api.github.com\/users\/imvladikon\/repos","events_url":"https:\/\/api.github.com\/users\/imvladikon\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/imvladikon\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-12T23:42:52Z","updated_at":"2020-12-18T10:47:30Z","closed_at":"2020-12-18T10:47:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1526","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1526","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1526.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1526.patch","merged_at":"2020-12-18T10:47:30Z"},"body":"added corpus from https:\/\/thisworld.online\/ , https:\/\/github.com\/thisworld1\/thisworld.online","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1526\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1526\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1525","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1525\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1525\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1525\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1525","id":764530582,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NTUwMzI2","number":1525,"title":"Adding a second branch for Atomic to fix git errors","user":{"login":"ontocord","id":8900094,"node_id":"MDQ6VXNlcjg5MDAwOTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8900094?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ontocord","html_url":"https:\/\/github.com\/ontocord","followers_url":"https:\/\/api.github.com\/users\/ontocord\/followers","following_url":"https:\/\/api.github.com\/users\/ontocord\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ontocord\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ontocord\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ontocord\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ontocord\/orgs","repos_url":"https:\/\/api.github.com\/users\/ontocord\/repos","events_url":"https:\/\/api.github.com\/users\/ontocord\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ontocord\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-12T22:54:50Z","updated_at":"2020-12-28T15:51:11Z","closed_at":"2020-12-28T15:51:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1525","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1525","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1525.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1525.patch","merged_at":"2020-12-28T15:51:11Z"},"body":"Adding the Atomic common sense dataset.\r\nSee https:\/\/homes.cs.washington.edu\/~msap\/atomic\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1525\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1525\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1524","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1524\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1524\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1524\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1524","id":764521672,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NTQ2MjI0","number":1524,"title":"ADD: swahili dataset for language 
modeling","user":{"login":"akshayb7","id":29649801,"node_id":"MDQ6VXNlcjI5NjQ5ODAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29649801?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/akshayb7","html_url":"https:\/\/github.com\/akshayb7","followers_url":"https:\/\/api.github.com\/users\/akshayb7\/followers","following_url":"https:\/\/api.github.com\/users\/akshayb7\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/akshayb7\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/akshayb7\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/akshayb7\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/akshayb7\/orgs","repos_url":"https:\/\/api.github.com\/users\/akshayb7\/repos","events_url":"https:\/\/api.github.com\/users\/akshayb7\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/akshayb7\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-12T22:47:18Z","updated_at":"2020-12-17T16:37:16Z","closed_at":"2020-12-17T16:37:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1524","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1524","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1524.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1524.patch","merged_at":"2020-12-17T16:37:16Z"},"body":"Add a corpus for Swahili language modelling. All tests passed locally. README updated with all information available.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1524\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1524\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1523","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1523\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1523\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1523\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1523","id":764359524,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NDYyMTE4","number":1523,"title":"Add eHealth Knowledge Discovery 
dataset","user":{"login":"mariagrandury","id":57645283,"node_id":"MDQ6VXNlcjU3NjQ1Mjgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57645283?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariagrandury","html_url":"https:\/\/github.com\/mariagrandury","followers_url":"https:\/\/api.github.com\/users\/mariagrandury\/followers","following_url":"https:\/\/api.github.com\/users\/mariagrandury\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariagrandury\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariagrandury\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariagrandury\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariagrandury\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariagrandury\/repos","events_url":"https:\/\/api.github.com\/users\/mariagrandury\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariagrandury\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-12T20:44:18Z","updated_at":"2020-12-17T17:02:41Z","closed_at":"2020-12-17T16:48:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1523","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1523","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1523.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1523.patch","merged_at":"2020-12-17T16:48:56Z"},"body":"This Spanish dataset can be used to mine knowledge from unstructured health texts. \r\n\r\nIn particular, for:\r\n- Entity recognition\r\n- Relation extraction\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1523\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1523\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1522","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1522\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1522\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1522\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1522","id":764341594,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NDUzNjg4","number":1522,"title":"Add semeval 2020 task 
11","user":{"login":"ZacharySBrown","id":7950786,"node_id":"MDQ6VXNlcjc5NTA3ODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7950786?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZacharySBrown","html_url":"https:\/\/github.com\/ZacharySBrown","followers_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/followers","following_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/repos","events_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-12T20:32:14Z","updated_at":"2020-12-15T16:48:52Z","closed_at":"2020-12-15T16:48:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1522","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1522","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1522.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1522.patch","merged_at":"2020-12-15T16:48:52Z"},"body":"Adding in propaganda detection task (task 11) from Sem Eval 2020","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1522\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1522\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1521","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1521\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1521\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1521\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1521","id":764320841,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4NDQzOTgz","number":1521,"title":"Atomic","user":{"login":"ontocord","id":8900094,"node_id":"MDQ6VXNlcjg5MDAwOTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8900094?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ontocord","html_url":"https:\/\/github.com\/ontocord","followers_url":"https:\/\/api.github.com\/users\/ontocord\/followers","following_url":"https:\/\/api.github.com\/users\/ontocord\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ontocord\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ontocord\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ontocord\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ontocord\/orgs","repos_url":"https:\/\/api.github.com\/users\/ontocord\/repos","events_url":"https:\/\/api.github.com\/users\/ontocord\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ontocord\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-12T20:18:08Z","updated_at":"2020-12-12T22:56:48Z","closed_at":"2020-12-12T22:56:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1521","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1521","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1521.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1521.patch","merged_at":null},"body":"This is the ATOMIC common sense dataset. 
More info can be found here:\r\n\r\n* README.md still to be created.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1521\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1521\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1520","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1520\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1520\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1520\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1520","id":764140938,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MzU5MTA5","number":1520,"title":"ru_reviews dataset adding","user":{"login":"darshan-gandhi","id":44197177,"node_id":"MDQ6VXNlcjQ0MTk3MTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44197177?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/darshan-gandhi","html_url":"https:\/\/github.com\/darshan-gandhi","followers_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/followers","following_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/orgs","repos_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/repos","events_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-12T18:13:06Z","updated_at":"2020-12-15T09:50:15Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1520","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1520","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1520.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1520.patch","merged_at":null},"body":"RuReviews: An Automatically Annotated Sentiment Analysis Dataset for Product Reviews in Russian","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1520\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1520\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1519","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1519\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1519\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1519\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1519","id":764107360,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MzM5OTg5","number":1519,"title":"Initial commit for AQuaMuSe","user":{"login":"Karthik-Bhaskar","id":13200370,"node_id":"MDQ6VXNlcjEzMjAwMzcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13200370?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar","html_url":"https:\/\/github.com\/Karthik-Bhaskar","followers_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/followers","following_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/orgs","repos_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/repos","events_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-12T17:46:16Z","updated_at":"2020-12-18T13:50:42Z","closed_at":"2020-12-17T17:03:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1519","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1519","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1519.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1519.patch","merged_at":"2020-12-17T17:03:30Z"},"body":"There is an issue in generation of dummy data. 
Tests on real data have passed locally.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1519\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1519\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1518","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1518\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1518\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1518\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1518","id":764045722,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MzAyNzYy","number":1518,"title":"Add twi text","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-12T16:52:02Z","updated_at":"2020-12-13T18:53:37Z","closed_at":"2020-12-13T18:53:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1518","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1518","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1518.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1518.patch","merged_at":"2020-12-13T18:53:37Z"},"body":"Add Twi texts","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1518\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1518\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1517","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1517\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1517\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1517\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1517","id":764045214,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MzAyNDM1","number":1517,"title":"Kd conv 
smangrul","user":{"login":"pacman100","id":13534540,"node_id":"MDQ6VXNlcjEzNTM0NTQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13534540?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pacman100","html_url":"https:\/\/github.com\/pacman100","followers_url":"https:\/\/api.github.com\/users\/pacman100\/followers","following_url":"https:\/\/api.github.com\/users\/pacman100\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pacman100\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pacman100\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pacman100\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pacman100\/orgs","repos_url":"https:\/\/api.github.com\/users\/pacman100\/repos","events_url":"https:\/\/api.github.com\/users\/pacman100\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pacman100\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-12T16:51:30Z","updated_at":"2020-12-16T14:56:14Z","closed_at":"2020-12-16T14:56:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1517","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1517","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1517.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1517.patch","merged_at":"2020-12-16T14:56:14Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1517\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1517\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1516","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1516\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1516\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1516\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1516","id":764032327,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MjkzOTMw","number":1516,"title":"adding 
wrbsc","user":{"login":"kldarek","id":15803781,"node_id":"MDQ6VXNlcjE1ODAzNzgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15803781?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kldarek","html_url":"https:\/\/github.com\/kldarek","followers_url":"https:\/\/api.github.com\/users\/kldarek\/followers","following_url":"https:\/\/api.github.com\/users\/kldarek\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kldarek\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kldarek\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kldarek\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kldarek\/orgs","repos_url":"https:\/\/api.github.com\/users\/kldarek\/repos","events_url":"https:\/\/api.github.com\/users\/kldarek\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kldarek\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-12T16:38:40Z","updated_at":"2020-12-18T09:41:33Z","closed_at":"2020-12-18T09:41:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1516","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1516","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1516.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1516.patch","merged_at":"2020-12-18T09:41:33Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1516\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1516\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1515","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1515\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1515\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1515\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1515","id":764022753,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4Mjg3NDc0","number":1515,"title":"Add yoruba 
text","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-12T16:29:30Z","updated_at":"2020-12-13T18:37:58Z","closed_at":"2020-12-13T18:37:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1515","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1515","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1515.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1515.patch","merged_at":null},"body":"Adding Yoruba text C3","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1515\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1515\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1514","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1514\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1514\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1514\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1514","id":764017148,"node_id":"MDU6SXNzdWU3NjQwMTcxNDg=","number":1514,"title":"how to get all the options of a property in datasets 
","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-12T16:24:08Z","updated_at":"2020-12-18T08:08:57Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\ncould you tell me how I can get all unique options of a property of dataset?\r\nfor instance in case of boolq, if the user wants to know which unique labels it has, is there a way to access unique labels without getting all training data lables and then forming a set i mean? thanks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1514\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1514\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1513","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1513\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1513\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1513\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1513","id":764016850,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MjgzNDUz","number":1513,"title":"app_reviews_by_users","user":{"login":"darshan-gandhi","id":44197177,"node_id":"MDQ6VXNlcjQ0MTk3MTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44197177?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/darshan-gandhi","html_url":"https:\/\/github.com\/darshan-gandhi","followers_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/followers","following_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/orgs","repos_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/repos","events_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-12T16:23:49Z","updated_at":"2020-12-14T20:45:24Z","closed_at":"2020-12-14T20:45:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1513","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1513","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1513.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1513.patch","merged_at":"2020-12-14T20:45:24Z"},"body":"Software Applications User Reviews ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1513\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1513\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1512","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1512\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1512\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1512\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1512","id":764010722,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4Mjc5MzIy","number":1512,"title":"Add Hippocorpus 
Dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-12T16:17:53Z","updated_at":"2020-12-13T05:09:08Z","closed_at":"2020-12-13T05:08:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1512","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1512","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1512.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1512.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1512\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1512\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1511","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1511\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1511\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1511\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1511","id":764006477,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4Mjc2NDM5","number":1511,"title":"poleval 
cyberbullying","user":{"login":"czabo","id":75574105,"node_id":"MDQ6VXNlcjc1NTc0MTA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75574105?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/czabo","html_url":"https:\/\/github.com\/czabo","followers_url":"https:\/\/api.github.com\/users\/czabo\/followers","following_url":"https:\/\/api.github.com\/users\/czabo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/czabo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/czabo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/czabo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/czabo\/orgs","repos_url":"https:\/\/api.github.com\/users\/czabo\/repos","events_url":"https:\/\/api.github.com\/users\/czabo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/czabo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-12T16:13:44Z","updated_at":"2020-12-17T16:20:59Z","closed_at":"2020-12-17T16:19:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1511","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1511","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1511.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1511.patch","merged_at":"2020-12-17T16:19:58Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1511\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1511\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1510","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1510\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1510\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1510\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1510","id":763980369,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MjU4NDg3","number":1510,"title":"Add Dataset for (qa_srl)Question-Answer Driven Semantic Role 
Labeling","user":{"login":"bpatidar","id":12439573,"node_id":"MDQ6VXNlcjEyNDM5NTcz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12439573?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bpatidar","html_url":"https:\/\/github.com\/bpatidar","followers_url":"https:\/\/api.github.com\/users\/bpatidar\/followers","following_url":"https:\/\/api.github.com\/users\/bpatidar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bpatidar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bpatidar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bpatidar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bpatidar\/orgs","repos_url":"https:\/\/api.github.com\/users\/bpatidar\/repos","events_url":"https:\/\/api.github.com\/users\/bpatidar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bpatidar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-12T15:48:11Z","updated_at":"2020-12-17T16:06:22Z","closed_at":"2020-12-17T16:06:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1510","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1510","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1510.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1510.patch","merged_at":"2020-12-17T16:06:22Z"},"body":"- Added tags, Readme file\r\n- Added code changes","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1510\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1510\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1509","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1509\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1509\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1509\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1509","id":763964857,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MjQ4NTgx","number":1509,"title":"Added dataset 
Makhzan","user":{"login":"arkhalid","id":14899066,"node_id":"MDQ6VXNlcjE0ODk5MDY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14899066?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arkhalid","html_url":"https:\/\/github.com\/arkhalid","followers_url":"https:\/\/api.github.com\/users\/arkhalid\/followers","following_url":"https:\/\/api.github.com\/users\/arkhalid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arkhalid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arkhalid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arkhalid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arkhalid\/orgs","repos_url":"https:\/\/api.github.com\/users\/arkhalid\/repos","events_url":"https:\/\/api.github.com\/users\/arkhalid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arkhalid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-12T15:34:07Z","updated_at":"2020-12-16T15:04:52Z","closed_at":"2020-12-16T15:04:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1509","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1509","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1509.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1509.patch","merged_at":"2020-12-16T15:04:51Z"},"body":"Need help with the dummy data.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1509\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1509\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1508","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1508\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1508\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1508\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1508","id":763908724,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MjEyODUy","number":1508,"title":"Fix namedsplit 
docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-12T14:43:38Z","updated_at":"2021-03-11T02:18:39Z","closed_at":"2020-12-15T12:57:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1508","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1508","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1508.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1508.patch","merged_at":"2020-12-15T12:57:48Z"},"body":"Fixes a broken link and `DatasetInfoMixin.split`'s docstring.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1508\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1508\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1507","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1507\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1507\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1507\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1507","id":763857872,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MTgyMzE2","number":1507,"title":"Add SelQA 
Dataset","user":{"login":"Bharat123rox","id":13381361,"node_id":"MDQ6VXNlcjEzMzgxMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13381361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Bharat123rox","html_url":"https:\/\/github.com\/Bharat123rox","followers_url":"https:\/\/api.github.com\/users\/Bharat123rox\/followers","following_url":"https:\/\/api.github.com\/users\/Bharat123rox\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Bharat123rox\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Bharat123rox\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Bharat123rox\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Bharat123rox\/orgs","repos_url":"https:\/\/api.github.com\/users\/Bharat123rox\/repos","events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-12T13:58:07Z","updated_at":"2020-12-16T16:49:23Z","closed_at":"2020-12-16T16:49:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1507","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1507","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1507.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1507.patch","merged_at":"2020-12-16T16:49:23Z"},"body":"Add the SelQA Dataset, a new benchmark for selection-based question answering tasks\r\nRepo: https:\/\/github.com\/emorynlp\/selqa\/\r\nPaper: https:\/\/arxiv.org\/pdf\/1606.08513.pdf","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1507\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1507\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1506","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1506\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1506\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1506\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1506","id":763846074,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MTc1ODEz","number":1506,"title":"Add nq_open question answering 
dataset","user":{"login":"Nilanshrajput","id":28673745,"node_id":"MDQ6VXNlcjI4NjczNzQ1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28673745?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Nilanshrajput","html_url":"https:\/\/github.com\/Nilanshrajput","followers_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/followers","following_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/orgs","repos_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/repos","events_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-12-12T13:46:48Z","updated_at":"2020-12-17T15:34:50Z","closed_at":"2020-12-17T15:34:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1506","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1506","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1506.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1506.patch","merged_at":null},"body":"Added nq_open Open-domain question answering dataset.\r\n\r\nThe NQ-Open task is currently being used to evaluate submissions to the EfficientQA competition, which is part of the NeurIPS 2020 competition track.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1506\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1506\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1505","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1505\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1505\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1505\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1505","id":763750773,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MTEyMTk5","number":1505,"title":"add ilist 
dataset","user":{"login":"vasudevgupta7","id":53136577,"node_id":"MDQ6VXNlcjUzMTM2NTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53136577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vasudevgupta7","html_url":"https:\/\/github.com\/vasudevgupta7","followers_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/followers","following_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/orgs","repos_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/repos","events_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-12T12:44:12Z","updated_at":"2020-12-17T15:43:07Z","closed_at":"2020-12-17T15:43:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1505","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1505","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1505.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1505.patch","merged_at":"2020-12-17T15:43:07Z"},"body":"This PR will add Indo-Aryan Language Identification Shared Task Dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1505\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1505\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1504","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1504\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1504\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1504\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1504","id":763697231,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MDczMzcw","number":1504,"title":"Add SentiWS dataset for pos-tagging and sentiment-scoring 
(German)","user":{"login":"harshalmittal4","id":24206326,"node_id":"MDQ6VXNlcjI0MjA2MzI2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24206326?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/harshalmittal4","html_url":"https:\/\/github.com\/harshalmittal4","followers_url":"https:\/\/api.github.com\/users\/harshalmittal4\/followers","following_url":"https:\/\/api.github.com\/users\/harshalmittal4\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/harshalmittal4\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/harshalmittal4\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/harshalmittal4\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/harshalmittal4\/orgs","repos_url":"https:\/\/api.github.com\/users\/harshalmittal4\/repos","events_url":"https:\/\/api.github.com\/users\/harshalmittal4\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/harshalmittal4\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-12T12:17:53Z","updated_at":"2020-12-15T18:32:38Z","closed_at":"2020-12-15T18:32:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1504","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1504","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1504.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1504.patch","merged_at":"2020-12-15T18:32:38Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1504\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1504\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1503","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1503\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1503\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1503\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1503","id":763667489,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MDUxNDM2","number":1503,"title":"Adding COVID QA dataset in Chinese and English from UC 
SanDiego","user":{"login":"vrindaprabhu","id":16264631,"node_id":"MDQ6VXNlcjE2MjY0NjMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16264631?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vrindaprabhu","html_url":"https:\/\/github.com\/vrindaprabhu","followers_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/followers","following_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/orgs","repos_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/repos","events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-12T12:02:48Z","updated_at":"2021-02-16T05:29:18Z","closed_at":"2020-12-17T15:29:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1503","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1503","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1503.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1503.patch","merged_at":"2020-12-17T15:29:26Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1503\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1503\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1502","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1502\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1502\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1502\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1502","id":763658208,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM4MDQ1OTY5","number":1502,"title":"Add Senti_Lex 
Dataset","user":{"login":"KMFODA","id":35491698,"node_id":"MDQ6VXNlcjM1NDkxNjk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35491698?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KMFODA","html_url":"https:\/\/github.com\/KMFODA","followers_url":"https:\/\/api.github.com\/users\/KMFODA\/followers","following_url":"https:\/\/api.github.com\/users\/KMFODA\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KMFODA\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KMFODA\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KMFODA\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KMFODA\/orgs","repos_url":"https:\/\/api.github.com\/users\/KMFODA\/repos","events_url":"https:\/\/api.github.com\/users\/KMFODA\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KMFODA\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-12T11:55:29Z","updated_at":"2020-12-28T14:01:12Z","closed_at":"2020-12-28T14:01:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1502","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1502","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1502.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1502.patch","merged_at":"2020-12-28T14:01:12Z"},"body":"TODO:\r\nFix feature format issue\r\nCreate dataset_info.json file\r\nRun pytests\r\nMake Style","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1502\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1502\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1501","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1501\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1501\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1501\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1501","id":763517647,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3OTYzMDU5","number":1501,"title":"Adds XED 
dataset","user":{"login":"harshalmittal4","id":24206326,"node_id":"MDQ6VXNlcjI0MjA2MzI2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24206326?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/harshalmittal4","html_url":"https:\/\/github.com\/harshalmittal4","followers_url":"https:\/\/api.github.com\/users\/harshalmittal4\/followers","following_url":"https:\/\/api.github.com\/users\/harshalmittal4\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/harshalmittal4\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/harshalmittal4\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/harshalmittal4\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/harshalmittal4\/orgs","repos_url":"https:\/\/api.github.com\/users\/harshalmittal4\/repos","events_url":"https:\/\/api.github.com\/users\/harshalmittal4\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/harshalmittal4\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-12T09:47:00Z","updated_at":"2020-12-14T21:20:59Z","closed_at":"2020-12-14T21:20:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1501","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1501","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1501.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1501.patch","merged_at":"2020-12-14T21:20:59Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1501\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1501\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1500","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1500\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1500\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1500\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1500","id":763479305,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3OTM0OTI1","number":1500,"title":"adding 
polsum","user":{"login":"kldarek","id":15803781,"node_id":"MDQ6VXNlcjE1ODAzNzgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15803781?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kldarek","html_url":"https:\/\/github.com\/kldarek","followers_url":"https:\/\/api.github.com\/users\/kldarek\/followers","following_url":"https:\/\/api.github.com\/users\/kldarek\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kldarek\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kldarek\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kldarek\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kldarek\/orgs","repos_url":"https:\/\/api.github.com\/users\/kldarek\/repos","events_url":"https:\/\/api.github.com\/users\/kldarek\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kldarek\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-12T09:05:29Z","updated_at":"2020-12-18T09:43:43Z","closed_at":"2020-12-18T09:43:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1500","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1500","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1500.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1500.patch","merged_at":"2020-12-18T09:43:43Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1500\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1500\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1499","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1499\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1499\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1499\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1499","id":763464693,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3OTIyNjA3","number":1499,"title":"update the dataset 
id_newspapers_2018","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-12T08:47:12Z","updated_at":"2020-12-14T15:28:07Z","closed_at":"2020-12-14T15:28:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1499","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1499","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1499.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1499.patch","merged_at":"2020-12-14T15:28:07Z"},"body":"Hi, I need to update the link to the dataset. The link in the previous PR was to a small test dataset. 
Thanks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1499\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1499\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1498","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1498\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1498\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1498\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1498","id":763303606,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3Nzc2MjM5","number":1498,"title":"add stereoset","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-12T05:04:37Z","updated_at":"2020-12-18T10:03:53Z","closed_at":"2020-12-18T10:03:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1498","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1498","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1498.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1498.patch","merged_at":"2020-12-18T10:03:53Z"},"body":"StereoSet is a dataset that measures stereotype bias in language models. 
StereoSet consists of 17,000 sentences that measures model preferences across gender, race, religion, and profession.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1498\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1498\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1497","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1497\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1497\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1497\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1497","id":763180824,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3NjYxNzY2","number":1497,"title":"adding fake-news-english-5","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-12T02:13:11Z","updated_at":"2020-12-17T20:07:17Z","closed_at":"2020-12-17T20:07:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1497","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1497","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1497.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1497.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1497\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1497\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1496","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1496\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1496\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1496\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1496","id":763091663,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3NTc4NzQw","number":1496,"title":"Add Multi-Dimensional Gender Bias classification data","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-12T00:17:37Z","updated_at":"2020-12-14T21:14:55Z","closed_at":"2020-12-14T21:14:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1496","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1496","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1496.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1496.patch","merged_at":"2020-12-14T21:14:55Z"},"body":"https:\/\/parl.ai\/projects\/md_gender\/\r\n\r\nMostly has the ABOUT dimension since the others are inferred from other datasets in most cases.\r\n\r\nI tried to keep the dummy data small but one of the configs has 140 splits ( > 56KB data)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1496\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1496\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1495","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1495\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1495\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1495\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1495","id":763025562,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3NTE2ODE4","number":1495,"title":"Opus DGT 
added","user":{"login":"rkc007","id":22396042,"node_id":"MDQ6VXNlcjIyMzk2MDQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22396042?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rkc007","html_url":"https:\/\/github.com\/rkc007","followers_url":"https:\/\/api.github.com\/users\/rkc007\/followers","following_url":"https:\/\/api.github.com\/users\/rkc007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rkc007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rkc007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rkc007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rkc007\/orgs","repos_url":"https:\/\/api.github.com\/users\/rkc007\/repos","events_url":"https:\/\/api.github.com\/users\/rkc007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rkc007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-11T23:05:09Z","updated_at":"2020-12-17T14:38:41Z","closed_at":"2020-12-17T14:38:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1495","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1495","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1495.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1495.patch","merged_at":"2020-12-17T14:38:41Z"},"body":"Dataset : http:\/\/opus.nlpl.eu\/DGT.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1495\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1495\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1494","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1494\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1494\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1494\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1494","id":762992601,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3NDg2MzU4","number":1494,"title":"Added Opus 
Wikipedia","user":{"login":"rkc007","id":22396042,"node_id":"MDQ6VXNlcjIyMzk2MDQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22396042?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rkc007","html_url":"https:\/\/github.com\/rkc007","followers_url":"https:\/\/api.github.com\/users\/rkc007\/followers","following_url":"https:\/\/api.github.com\/users\/rkc007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rkc007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rkc007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rkc007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rkc007\/orgs","repos_url":"https:\/\/api.github.com\/users\/rkc007\/repos","events_url":"https:\/\/api.github.com\/users\/rkc007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rkc007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-11T22:28:03Z","updated_at":"2020-12-17T14:38:28Z","closed_at":"2020-12-17T14:38:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1494","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1494","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1494.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1494.patch","merged_at":"2020-12-17T14:38:28Z"},"body":"Dataset : http:\/\/opus.nlpl.eu\/Wikipedia.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1494\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1494\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1493","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1493\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1493\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1493\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1493","id":762979415,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3NDc0MDc1","number":1493,"title":"Added RONEC 
dataset.","user":{"login":"iliemihai","id":2815308,"node_id":"MDQ6VXNlcjI4MTUzMDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2815308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iliemihai","html_url":"https:\/\/github.com\/iliemihai","followers_url":"https:\/\/api.github.com\/users\/iliemihai\/followers","following_url":"https:\/\/api.github.com\/users\/iliemihai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iliemihai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iliemihai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iliemihai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iliemihai\/orgs","repos_url":"https:\/\/api.github.com\/users\/iliemihai\/repos","events_url":"https:\/\/api.github.com\/users\/iliemihai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iliemihai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-11T22:14:50Z","updated_at":"2020-12-21T14:48:56Z","closed_at":"2020-12-21T14:48:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1493","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1493","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1493.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1493.patch","merged_at":"2020-12-21T14:48:56Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1493\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1493\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1492","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1492\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1492\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1492\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1492","id":762965239,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3NDYxMjc3","number":1492,"title":"OPUS UBUNTU 
dataset","user":{"login":"rkc007","id":22396042,"node_id":"MDQ6VXNlcjIyMzk2MDQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22396042?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rkc007","html_url":"https:\/\/github.com\/rkc007","followers_url":"https:\/\/api.github.com\/users\/rkc007\/followers","following_url":"https:\/\/api.github.com\/users\/rkc007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rkc007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rkc007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rkc007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rkc007\/orgs","repos_url":"https:\/\/api.github.com\/users\/rkc007\/repos","events_url":"https:\/\/api.github.com\/users\/rkc007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rkc007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-11T22:01:37Z","updated_at":"2020-12-17T14:38:16Z","closed_at":"2020-12-17T14:38:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1492","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1492","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1492.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1492.patch","merged_at":"2020-12-17T14:38:15Z"},"body":"Dataset : http:\/\/opus.nlpl.eu\/Ubuntu.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1492\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1492\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1491","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1491\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1491\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1491\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1491","id":762920920,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3NDIxMTc3","number":1491,"title":"added opus GNOME 
data","user":{"login":"rkc007","id":22396042,"node_id":"MDQ6VXNlcjIyMzk2MDQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22396042?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rkc007","html_url":"https:\/\/github.com\/rkc007","followers_url":"https:\/\/api.github.com\/users\/rkc007\/followers","following_url":"https:\/\/api.github.com\/users\/rkc007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rkc007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rkc007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rkc007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rkc007\/orgs","repos_url":"https:\/\/api.github.com\/users\/rkc007\/repos","events_url":"https:\/\/api.github.com\/users\/rkc007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rkc007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-11T21:21:51Z","updated_at":"2020-12-17T14:20:23Z","closed_at":"2020-12-17T14:20:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1491","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1491","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1491.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1491.patch","merged_at":"2020-12-17T14:20:23Z"},"body":"Dataset : http:\/\/opus.nlpl.eu\/GNOME.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1491\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1491\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1490","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1490\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1490\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1490\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1490","id":762915346,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3NDE2MDU5","number":1490,"title":"ADD: opus_rf dataset for 
translation","user":{"login":"akshayb7","id":29649801,"node_id":"MDQ6VXNlcjI5NjQ5ODAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29649801?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/akshayb7","html_url":"https:\/\/github.com\/akshayb7","followers_url":"https:\/\/api.github.com\/users\/akshayb7\/followers","following_url":"https:\/\/api.github.com\/users\/akshayb7\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/akshayb7\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/akshayb7\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/akshayb7\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/akshayb7\/orgs","repos_url":"https:\/\/api.github.com\/users\/akshayb7\/repos","events_url":"https:\/\/api.github.com\/users\/akshayb7\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/akshayb7\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-11T21:16:43Z","updated_at":"2020-12-13T19:12:24Z","closed_at":"2020-12-13T19:12:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1490","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1490","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1490.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1490.patch","merged_at":"2020-12-13T19:12:24Z"},"body":"Passed all local tests. Hopefully passes all Circle CI tests too. Tried to keep the commit history clean.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1490\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1490\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1489","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1489\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1489\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1489\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1489","id":762908763,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3NDA5OTkx","number":1489,"title":"Fake news english 
4","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-11T21:10:35Z","updated_at":"2020-12-12T19:39:52Z","closed_at":"2020-12-12T19:38:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1489","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1489","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1489.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1489.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1489\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1489\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1488","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1488\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1488\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1488\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1488","id":762860679,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3MzY1ODUz","number":1488,"title":"Adding 
NELL","user":{"login":"ontocord","id":8900094,"node_id":"MDQ6VXNlcjg5MDAwOTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8900094?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ontocord","html_url":"https:\/\/github.com\/ontocord","followers_url":"https:\/\/api.github.com\/users\/ontocord\/followers","following_url":"https:\/\/api.github.com\/users\/ontocord\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ontocord\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ontocord\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ontocord\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ontocord\/orgs","repos_url":"https:\/\/api.github.com\/users\/ontocord\/repos","events_url":"https:\/\/api.github.com\/users\/ontocord\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ontocord\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-11T20:25:25Z","updated_at":"2021-01-07T08:37:07Z","closed_at":"2020-12-21T14:45:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1488","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1488","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1488.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1488.patch","merged_at":"2020-12-21T14:44:59Z"},"body":"NELL is a knowledge base and knowledge graph along with sentences used to create the KB. See http:\/\/rtw.ml.cmu.edu\/rtw\/ for more details.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1488\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1488\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1487","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1487\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1487\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1487\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1487","id":762794921,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3MzA2MTEx","number":1487,"title":" added conv_ai_3 
dataset","user":{"login":"rkc007","id":22396042,"node_id":"MDQ6VXNlcjIyMzk2MDQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22396042?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rkc007","html_url":"https:\/\/github.com\/rkc007","followers_url":"https:\/\/api.github.com\/users\/rkc007\/followers","following_url":"https:\/\/api.github.com\/users\/rkc007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rkc007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rkc007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rkc007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rkc007\/orgs","repos_url":"https:\/\/api.github.com\/users\/rkc007\/repos","events_url":"https:\/\/api.github.com\/users\/rkc007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rkc007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-11T19:26:26Z","updated_at":"2020-12-28T09:38:40Z","closed_at":"2020-12-28T09:38:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1487","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1487","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1487.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1487.patch","merged_at":"2020-12-28T09:38:39Z"},"body":"Dataset : https:\/\/github.com\/aliannejadi\/ClariQ\/\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1487\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1487\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1486","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1486\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1486\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1486\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1486","id":762790102,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3MzAxODY2","number":1486,"title":"hate speech 18 
dataset","user":{"login":"czabo","id":75574105,"node_id":"MDQ6VXNlcjc1NTc0MTA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75574105?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/czabo","html_url":"https:\/\/github.com\/czabo","followers_url":"https:\/\/api.github.com\/users\/czabo\/followers","following_url":"https:\/\/api.github.com\/users\/czabo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/czabo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/czabo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/czabo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/czabo\/orgs","repos_url":"https:\/\/api.github.com\/users\/czabo\/repos","events_url":"https:\/\/api.github.com\/users\/czabo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/czabo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-11T19:22:14Z","updated_at":"2020-12-14T19:43:18Z","closed_at":"2020-12-14T19:43:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1486","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1486","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1486.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1486.patch","merged_at":"2020-12-14T19:43:18Z"},"body":"This is again a PR instead of #1339, because something went wrong there. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1486\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1486\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1485","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1485\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1485\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1485\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1485","id":762774822,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3Mjg4MTg0","number":1485,"title":"Re-added wiki_movies dataset due to previous PR having changes from 
m\u2026","user":{"login":"aclifton314","id":53267795,"node_id":"MDQ6VXNlcjUzMjY3Nzk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53267795?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aclifton314","html_url":"https:\/\/github.com\/aclifton314","followers_url":"https:\/\/api.github.com\/users\/aclifton314\/followers","following_url":"https:\/\/api.github.com\/users\/aclifton314\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aclifton314\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aclifton314\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aclifton314\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aclifton314\/orgs","repos_url":"https:\/\/api.github.com\/users\/aclifton314\/repos","events_url":"https:\/\/api.github.com\/users\/aclifton314\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aclifton314\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-11T19:07:48Z","updated_at":"2020-12-14T14:08:22Z","closed_at":"2020-12-14T14:08:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1485","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1485","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1485.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1485.patch","merged_at":"2020-12-14T14:08:22Z"},"body":"\u2026any other unassociated files.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1485\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1485\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1484","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1484\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1484\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1484\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1484","id":762747096,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3MjYzMDc5","number":1484,"title":"Add peer-read 
dataset","user":{"login":"vinaykudari","id":34424769,"node_id":"MDQ6VXNlcjM0NDI0NzY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34424769?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vinaykudari","html_url":"https:\/\/github.com\/vinaykudari","followers_url":"https:\/\/api.github.com\/users\/vinaykudari\/followers","following_url":"https:\/\/api.github.com\/users\/vinaykudari\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vinaykudari\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vinaykudari\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vinaykudari\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vinaykudari\/orgs","repos_url":"https:\/\/api.github.com\/users\/vinaykudari\/repos","events_url":"https:\/\/api.github.com\/users\/vinaykudari\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vinaykudari\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-11T18:43:44Z","updated_at":"2020-12-21T09:40:50Z","closed_at":"2020-12-21T09:40:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1484","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1484","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1484.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1484.patch","merged_at":"2020-12-21T09:40:50Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1484\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1484\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1483","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1483\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1483\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1483\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1483","id":762712337,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3MjMxMzQ4","number":1483,"title":"Added Times of India News Headlines 
Dataset","user":{"login":"tanmoyio","id":33005287,"node_id":"MDQ6VXNlcjMzMDA1Mjg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33005287?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tanmoyio","html_url":"https:\/\/github.com\/tanmoyio","followers_url":"https:\/\/api.github.com\/users\/tanmoyio\/followers","following_url":"https:\/\/api.github.com\/users\/tanmoyio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tanmoyio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tanmoyio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tanmoyio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tanmoyio\/orgs","repos_url":"https:\/\/api.github.com\/users\/tanmoyio\/repos","events_url":"https:\/\/api.github.com\/users\/tanmoyio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tanmoyio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-11T18:12:38Z","updated_at":"2020-12-14T18:08:08Z","closed_at":"2020-12-14T18:08:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1483","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1483","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1483.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1483.patch","merged_at":"2020-12-14T18:08:07Z"},"body":"Dataset name: Times of India News Headlines\r\nlink: https:\/\/dataverse.harvard.edu\/dataset.xhtml?persistentId=doi:10.7910\/DVN\/DPQMQH","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1483\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1483\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1482","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1482\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1482\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1482\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1482","id":762686820,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3MjA4NDk3","number":1482,"title":"Adding medical database chinese and 
english","user":{"login":"vrindaprabhu","id":16264631,"node_id":"MDQ6VXNlcjE2MjY0NjMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16264631?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vrindaprabhu","html_url":"https:\/\/github.com\/vrindaprabhu","followers_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/followers","following_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/orgs","repos_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/repos","events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-11T17:50:39Z","updated_at":"2021-02-16T05:28:36Z","closed_at":"2020-12-15T18:23:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1482","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1482","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1482.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1482.patch","merged_at":"2020-12-15T18:23:53Z"},"body":"Error in creating dummy dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1482\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1482\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1481","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1481\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1481\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1481\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1481","id":762579658,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3MTEwOTM1","number":1481,"title":"Fix ADD_NEW_DATASET to avoid rebasing once 
pushed","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-11T16:27:49Z","updated_at":"2021-01-07T10:10:20Z","closed_at":"2021-01-07T10:10:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1481","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1481","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1481.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1481.patch","merged_at":"2021-01-07T10:10:20Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1481\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1481\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1480","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1480\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1480\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1480\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1480","id":762530805,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM3MDY1NDMx","number":1480,"title":"Adding the Mac-Morpho 
dataset","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-11T16:01:38Z","updated_at":"2020-12-21T10:03:37Z","closed_at":"2020-12-21T10:03:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1480","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1480","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1480.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1480.patch","merged_at":"2020-12-21T10:03:37Z"},"body":"Adding the Mac-Morpho dataset, a Portuguese language dataset for Part-of-speech tagging tasks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1480\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1480\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1479","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1479\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1479\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1479\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1479","id":762320736,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2ODc3NTEz","number":1479,"title":"Add 
narrativeQA","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-11T12:58:31Z","updated_at":"2020-12-11T13:33:23Z","closed_at":"2020-12-11T13:33:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1479","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1479","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1479.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1479.patch","merged_at":"2020-12-11T13:33:23Z"},"body":"Redo of #1368 #309 #499\r\n\r\nIn redoing the dummy data a few times, I ended up adding a load of files to git. 
Hopefully this should work.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1479\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1479\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1478","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1478\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1478\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1478\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1478","id":762293076,"node_id":"MDU6SXNzdWU3NjIyOTMwNzY=","number":1478,"title":"Inconsistent argument names.","user":{"login":"Fraser-Greenlee","id":8402500,"node_id":"MDQ6VXNlcjg0MDI1MDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8402500?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Fraser-Greenlee","html_url":"https:\/\/github.com\/Fraser-Greenlee","followers_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/followers","following_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/orgs","repos_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/repos","events_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Fraser-Greenlee\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-11T12:19:38Z","updated_at":"2020-12-19T15:03:39Z","closed_at":"2020-12-19T15:03:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Just find it a wee bit odd that in the transformers library `predictions` are those made by the model:\r\nhttps:\/\/github.com\/huggingface\/transformers\/blob\/master\/src\/transformers\/trainer_utils.py#L51-L61\r\n\r\nWhile in many datasets metrics they are the ground truth labels:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/c3f53792a744ede18d748a1133b6597fdd2d8d18\/metrics\/accuracy\/accuracy.py#L31-L40\r\n\r\nDo you think predictions & references should be swapped? 
I'd be willing to do some refactoring here if you agree.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1478\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1478\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1477","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1477\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1477\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1477\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1477","id":762288811,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2ODQ5NzM4","number":1477,"title":"Jigsaw toxicity pred","user":{"login":"taihim","id":13764071,"node_id":"MDQ6VXNlcjEzNzY0MDcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13764071?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/taihim","html_url":"https:\/\/github.com\/taihim","followers_url":"https:\/\/api.github.com\/users\/taihim\/followers","following_url":"https:\/\/api.github.com\/users\/taihim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/taihim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/taihim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/taihim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/taihim\/orgs","repos_url":"https:\/\/api.github.com\/users\/taihim\/repos","events_url":"https:\/\/api.github.com\/users\/taihim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/taihim\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-11T12:13:20Z","updated_at":"2020-12-14T13:19:35Z","closed_at":"2020-12-14T13:19:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1477","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1477","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1477.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1477.patch","merged_at":"2020-12-14T13:19:35Z"},"body":"Managed to mess up my original pull request, opening a fresh one incorporating the changes suggested by @lhoestq.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1477\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1477\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1476","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1476\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1476\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1476\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1476","id":762256048,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2ODIxNDI5","number":1476,"title":"Add Spanish Billion Words Corpus","user":{"login":"mariagrandury","id":57645283,"node_id":"MDQ6VXNlcjU3NjQ1Mjgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57645283?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariagrandury","html_url":"https:\/\/github.com\/mariagrandury","followers_url":"https:\/\/api.github.com\/users\/mariagrandury\/followers","following_url":"https:\/\/api.github.com\/users\/mariagrandury\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariagrandury\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariagrandury\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariagrandury\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariagrandury\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariagrandury\/repos","events_url":"https:\/\/api.github.com\/users\/mariagrandury\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariagrandury\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-11T11:24:58Z","updated_at":"2020-12-17T17:04:08Z","closed_at":"2020-12-14T13:14:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1476","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1476","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1476.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1476.patch","merged_at":"2020-12-14T13:14:31Z"},"body":"Add an unannotated Spanish corpus of nearly 1.5 billion words, compiled from different resources from the web.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1476\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1476\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1475","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1475\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1475\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1475\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1475","id":762187000,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2NzYxMDQz","number":1475,"title":"Fix XML iterparse in opus_dogc 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-11T10:08:18Z","updated_at":"2020-12-17T11:28:47Z","closed_at":"2020-12-17T11:28:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1475","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1475","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1475.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1475.patch","merged_at":"2020-12-17T11:28:46Z"},"body":"I forgot to add `elem.clear()` to clear the element from memory.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1475\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1475\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1474","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1474\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1474\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1474\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1474","id":762083706,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2NjY4MjU3","number":1474,"title":"Create JSON dummy data without loading all dataset in 
memory","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-11T08:44:23Z","updated_at":"2020-12-17T12:14:42Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1474","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1474","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1474.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1474.patch","merged_at":null},"body":"See #1442.\r\n\r\nThe statement `json.load()` loads **all the file content in memory**.\r\n\r\nIn order to avoid this, file content should be parsed **iteratively**, by using the library `ijson` e.g.\r\n\r\nI have refactorized the code into a function `_create_json_dummy_data` and I have added some tests.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1474\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1474\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1473","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1473\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1473\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1473\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1473","id":762055694,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2NjQyODI5","number":1473,"title":"add 
srwac","user":{"login":"IvanZidov","id":11391118,"node_id":"MDQ6VXNlcjExMzkxMTE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11391118?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/IvanZidov","html_url":"https:\/\/github.com\/IvanZidov","followers_url":"https:\/\/api.github.com\/users\/IvanZidov\/followers","following_url":"https:\/\/api.github.com\/users\/IvanZidov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/IvanZidov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/IvanZidov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/IvanZidov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/IvanZidov\/orgs","repos_url":"https:\/\/api.github.com\/users\/IvanZidov\/repos","events_url":"https:\/\/api.github.com\/users\/IvanZidov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/IvanZidov\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-11T08:20:29Z","updated_at":"2020-12-17T11:40:59Z","closed_at":"2020-12-17T11:40:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1473","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1473","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1473.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1473.patch","merged_at":"2020-12-17T11:40:59Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1473\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1473\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1472","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1472\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1472\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1472\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1472","id":762037907,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2NjI2NjUx","number":1472,"title":"add 
Srwac","user":{"login":"IvanZidov","id":11391118,"node_id":"MDQ6VXNlcjExMzkxMTE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11391118?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/IvanZidov","html_url":"https:\/\/github.com\/IvanZidov","followers_url":"https:\/\/api.github.com\/users\/IvanZidov\/followers","following_url":"https:\/\/api.github.com\/users\/IvanZidov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/IvanZidov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/IvanZidov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/IvanZidov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/IvanZidov\/orgs","repos_url":"https:\/\/api.github.com\/users\/IvanZidov\/repos","events_url":"https:\/\/api.github.com\/users\/IvanZidov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/IvanZidov\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-11T08:04:57Z","updated_at":"2020-12-11T08:08:12Z","closed_at":"2020-12-11T08:05:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1472","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1472","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1472.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1472.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1472\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1472\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1471","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1471\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1471\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1471\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1471","id":761842512,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2NDUyMzcy","number":1471,"title":"Adding the HAREM 
dataset","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-11T03:21:10Z","updated_at":"2020-12-22T10:37:33Z","closed_at":"2020-12-22T10:37:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1471","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1471","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1471.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1471.patch","merged_at":"2020-12-22T10:37:33Z"},"body":"Adding the HAREM dataset, a Portuguese language dataset for NER tasks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1471\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1471\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1470","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1470\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1470\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1470\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1470","id":761791065,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2NDA2MjQx","number":1470,"title":"Add wiki lingua 
dataset","user":{"login":"katnoria","id":7674948,"node_id":"MDQ6VXNlcjc2NzQ5NDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7674948?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/katnoria","html_url":"https:\/\/github.com\/katnoria","followers_url":"https:\/\/api.github.com\/users\/katnoria\/followers","following_url":"https:\/\/api.github.com\/users\/katnoria\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/katnoria\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/katnoria\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/katnoria\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/katnoria\/orgs","repos_url":"https:\/\/api.github.com\/users\/katnoria\/repos","events_url":"https:\/\/api.github.com\/users\/katnoria\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/katnoria\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-12-11T02:04:18Z","updated_at":"2020-12-16T15:27:13Z","closed_at":"2020-12-16T15:27:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1470","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1470","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1470.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1470.patch","merged_at":null},"body":"Hello @lhoestq ,\r\n\r\nI am opening a fresh pull request as advised in my original PR https:\/\/github.com\/huggingface\/datasets\/pull\/1308\r\n\r\nThanks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1470\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1470\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1469","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1469\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1469\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1469\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1469","id":761611315,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2MjUzMDk4","number":1469,"title":"ADD: Wino_bias 
dataset","user":{"login":"akshayb7","id":29649801,"node_id":"MDQ6VXNlcjI5NjQ5ODAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29649801?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/akshayb7","html_url":"https:\/\/github.com\/akshayb7","followers_url":"https:\/\/api.github.com\/users\/akshayb7\/followers","following_url":"https:\/\/api.github.com\/users\/akshayb7\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/akshayb7\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/akshayb7\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/akshayb7\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/akshayb7\/orgs","repos_url":"https:\/\/api.github.com\/users\/akshayb7\/repos","events_url":"https:\/\/api.github.com\/users\/akshayb7\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/akshayb7\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-10T20:59:45Z","updated_at":"2020-12-13T19:13:57Z","closed_at":"2020-12-13T19:13:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1469","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1469","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1469.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1469.patch","merged_at":"2020-12-13T19:13:57Z"},"body":"Updated PR to counter messed up history of previous one (https:\/\/github.com\/huggingface\/datasets\/pull\/1235) due to rebase.\r\nRemoved manual downloading of dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1469\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1469\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1468","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1468\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1468\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1468\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1468","id":761607531,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2MjQ5OTg0","number":1468,"title":"add Indonesian newspapers 
(id_newspapers_2018)","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-12-10T20:54:12Z","updated_at":"2020-12-12T08:50:51Z","closed_at":"2020-12-11T17:04:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1468","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1468","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1468.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1468.patch","merged_at":"2020-12-11T17:04:41Z"},"body":"The dataset contains around 500K articles (136M of words) from 7 Indonesian newspapers. 
The size of uncompressed 500K json files (newspapers-json.tgz) is around 2.2GB.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1468\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1468\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1467","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1467\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1467\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1467\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1467","id":761557290,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2MjA3NDcx","number":1467,"title":"adding snow_simplified_japanese_corpus","user":{"login":"forest1988","id":2755894,"node_id":"MDQ6VXNlcjI3NTU4OTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2755894?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/forest1988","html_url":"https:\/\/github.com\/forest1988","followers_url":"https:\/\/api.github.com\/users\/forest1988\/followers","following_url":"https:\/\/api.github.com\/users\/forest1988\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/forest1988\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/forest1988\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/forest1988\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/forest1988\/orgs","repos_url":"https:\/\/api.github.com\/users\/forest1988\/repos","events_url":"https:\/\/api.github.com\/users\/forest1988\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/forest1988\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-10T19:45:03Z","updated_at":"2020-12-17T13:22:48Z","closed_at":"2020-12-17T11:25:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1467","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1467","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1467.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1467.patch","merged_at":"2020-12-17T11:25:34Z"},"body":"Adding simplified Japanese corpus \"SNOW T15\" and \"SNOW T23\".\r\nThey contain original Japanese, simplified Japanese, and original English (the original text is gotten from en-ja translation corpus). 
Hence, it can be used not only for Japanese simplification but also for en-ja translation.\r\n\r\n- http:\/\/www.jnlp.org\/SNOW\/T15\r\n- http:\/\/www.jnlp.org\/SNOW\/T23","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1467\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1467\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1466","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1466\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1466\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1466\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1466","id":761554357,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2MjA0OTMx","number":1466,"title":"Add Turkish News Category Dataset (270K).Updates were made for review\u2026","user":{"login":"basakbuluz","id":41359672,"node_id":"MDQ6VXNlcjQxMzU5Njcy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/41359672?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/basakbuluz","html_url":"https:\/\/github.com\/basakbuluz","followers_url":"https:\/\/api.github.com\/users\/basakbuluz\/followers","following_url":"https:\/\/api.github.com\/users\/basakbuluz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/basakbuluz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/basakbuluz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/basakbuluz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/basakbuluz\/orgs","repos_url":"https:\/\/api.github.com\/users\/basakbuluz\/repos","events_url":"https:\/\/api.github.com\/users\/basakbuluz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/basakbuluz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-10T19:41:12Z","updated_at":"2020-12-11T14:27:15Z","closed_at":"2020-12-11T14:27:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1466","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1466","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1466.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1466.patch","merged_at":"2020-12-11T14:27:14Z"},"body":"This PR adds the **Turkish News Categories Dataset (270K)** dataset which is a text classification dataset by me and @yavuzKomecoglu. Turkish news dataset consisting of **273601 news in 17 categories**, compiled from printed media and news websites between 2010 and 2017 by the [Interpress](https:\/\/www.interpress.com\/) media monitoring company.\r\n\r\n**Note**: Resubmitted as a clean version of the previous Pull Request(#1419). 
@SBrandeis @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1466\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1466\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1465","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1465\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1465\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1465\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1465","id":761538931,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2MTkxNjM1","number":1465,"title":"Add clean menyo20k data","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-10T19:22:00Z","updated_at":"2020-12-14T10:30:21Z","closed_at":"2020-12-14T10:30:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1465","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1465","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1465.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1465.patch","merged_at":"2020-12-14T10:30:21Z"},"body":"New Clean PR for menyo20k_mt","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1465\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1465\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1464","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1464\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1464\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1464\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1464","id":761533566,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2MTg3MDA0","number":1464,"title":"Reddit jokes","user":{"login":"tanmoyio","id":33005287,"node_id":"MDQ6VXNlcjMzMDA1Mjg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33005287?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tanmoyio","html_url":"https:\/\/github.com\/tanmoyio","followers_url":"https:\/\/api.github.com\/users\/tanmoyio\/followers","following_url":"https:\/\/api.github.com\/users\/tanmoyio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tanmoyio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tanmoyio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tanmoyio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tanmoyio\/orgs","repos_url":"https:\/\/api.github.com\/users\/tanmoyio\/repos","events_url":"https:\/\/api.github.com\/users\/tanmoyio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tanmoyio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-10T19:15:19Z","updated_at":"2020-12-10T20:14:00Z","closed_at":"2020-12-10T20:14:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1464","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1464","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1464.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1464.patch","merged_at":null},"body":"196k Reddit Jokes dataset\r\nDataset link- https:\/\/raw.githubusercontent.com\/taivop\/joke-dataset\/master\/reddit_jokes.json","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1464\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1464\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1463","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1463\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1463\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1463\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1463","id":761510908,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2MTY3NTMw","number":1463,"title":"Adding enriched_web_nlg features + handling xml 
bugs","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T18:48:19Z","updated_at":"2020-12-17T10:44:35Z","closed_at":"2020-12-17T10:44:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1463","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1463","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1463.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1463.patch","merged_at":"2020-12-17T10:44:33Z"},"body":"This PR adds features of the enriched_web_nlg dataset that were not present yet (most notably sorted rdf triplet sets), and deals with some xml issues that led to returning no data in cases where surgery could be performed to salvage it.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1463\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1463\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1462","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1462\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1462\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1462\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1462","id":761489274,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2MTQ4Njc1","number":1462,"title":"Added conv ai 2 
(Again)","user":{"login":"rkc007","id":22396042,"node_id":"MDQ6VXNlcjIyMzk2MDQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22396042?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rkc007","html_url":"https:\/\/github.com\/rkc007","followers_url":"https:\/\/api.github.com\/users\/rkc007\/followers","following_url":"https:\/\/api.github.com\/users\/rkc007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rkc007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rkc007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rkc007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rkc007\/orgs","repos_url":"https:\/\/api.github.com\/users\/rkc007\/repos","events_url":"https:\/\/api.github.com\/users\/rkc007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rkc007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-12-10T18:21:55Z","updated_at":"2020-12-13T00:21:32Z","closed_at":"2020-12-13T00:21:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1462","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1462","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1462.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1462.patch","merged_at":null},"body":"The original PR -> https:\/\/github.com\/huggingface\/datasets\/pull\/1383\r\n\r\nReason for creating again - \r\n\r\nThe reason I had to create the PR again was due to the master rebasing issue. After rebasing the changes, all the previous commits got added to the branch. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1462\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1462\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1461","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1461\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1461\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1461\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1461","id":761415420,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2MDgzODY5","number":1461,"title":"Adding NewsQA dataset","user":{"login":"rsanjaykamath","id":18527321,"node_id":"MDQ6VXNlcjE4NTI3MzIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18527321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rsanjaykamath","html_url":"https:\/\/github.com\/rsanjaykamath","followers_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/followers","following_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/orgs","repos_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/repos","events_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-12-10T17:01:10Z","updated_at":"2020-12-17T18:29:03Z","closed_at":"2020-12-17T18:27:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1461","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1461","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1461.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1461.patch","merged_at":"2020-12-17T18:27:36Z"},"body":"Since the dataset has legal restrictions to circulate the original data. It has to be manually downloaded by the user and loaded to the library. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1461\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1461\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1460","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1460\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1460\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1460\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1460","id":761349149,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM2MDI3NzYy","number":1460,"title":"add Bengali Hate Speech dataset","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-12-10T15:40:55Z","updated_at":"2021-09-17T16:54:53Z","closed_at":"2021-01-04T14:08:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1460","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1460","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1460.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1460.patch","merged_at":"2021-01-04T14:08:29Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1460\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1460\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1459","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1459\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1459\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1459\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1459","id":761258395,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1OTUxMDY2","number":1459,"title":"Add Google Conceptual Captions Dataset (manual 
download)","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T13:50:33Z","updated_at":"2021-02-22T13:47:46Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1459","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1459","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1459.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1459.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1459\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1459\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1458","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1458\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1458\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1458\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1458","id":761235962,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1OTMyMTA1","number":1458,"title":"Add 
id_nergrit_corpus","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-10T13:20:34Z","updated_at":"2020-12-17T10:45:15Z","closed_at":"2020-12-17T10:45:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1458","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1458","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1458.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1458.patch","merged_at":"2020-12-17T10:45:15Z"},"body":"Nergrit Corpus is a dataset collection of Indonesian Named Entity Recognition, Statement Extraction, and Sentiment Analysis. \r\nRecently my PR for id_nergrit_ner has been accepted and merged to the main branch. 
The id_nergrit_ner has only one dataset (NER), and this new PR renamed the dataset from id_nergrit_ner to id_nergrit_corpus and added 2 other remaining datasets (Statement Extraction, and Sentiment Analysis.)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1458\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1458\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1457","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1457\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1457\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1457\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1457","id":761232610,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1OTI5Mjg1","number":1457,"title":"add hrenwac_para","user":{"login":"IvanZidov","id":11391118,"node_id":"MDQ6VXNlcjExMzkxMTE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11391118?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/IvanZidov","html_url":"https:\/\/github.com\/IvanZidov","followers_url":"https:\/\/api.github.com\/users\/IvanZidov\/followers","following_url":"https:\/\/api.github.com\/users\/IvanZidov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/IvanZidov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/IvanZidov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/IvanZidov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/IvanZidov\/orgs","repos_url":"https:\/\/api.github.com\/users\/IvanZidov\/repos","events_url":"https:\/\/api.github.com\/users\/IvanZidov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/IvanZidov\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-10T13:16:20Z","updated_at":"2020-12-10T13:35:54Z","closed_at":"2020-12-10T13:35:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1457","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1457","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1457.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1457.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1457\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1457\/timeline","performed_via_github_app":null} 
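The id_nergrit_corpus PR above (#1458) bundles three Indonesian tasks (NER, statement extraction, sentiment analysis) behind one dataset name. As a hedged illustration of how such a multi-configuration dataset is consumed with the `datasets` library, the sketch below loads one configuration; the config name "ner" is an assumption for illustration and is not taken from the PR text.

```python
# Sketch: loading one configuration of a multi-config dataset such as
# id_nergrit_corpus. The config name "ner" is assumed for illustration;
# the PR only says the corpus covers NER, statement extraction and
# sentiment analysis.
from datasets import load_dataset

nergrit = load_dataset("id_nergrit_corpus", "ner")  # config name assumed
print(nergrit)              # shows the available splits
print(nergrit["train"][0])  # inspect one example
```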
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1456","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1456\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1456\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1456\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1456","id":761231296,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1OTI4MTc2","number":1456,"title":"Add CC100 Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T13:14:37Z","updated_at":"2020-12-14T10:20:09Z","closed_at":"2020-12-14T10:20:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1456","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1456","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1456.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1456.patch","merged_at":"2020-12-14T10:20:07Z"},"body":"Closes #773 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1456\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1456\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1455","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1455\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1455\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1455\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1455","id":761205073,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1OTA1OTQy","number":1455,"title":"Add HEAD-QA: A Healthcare Dataset for Complex 
Reasoning","user":{"login":"mariagrandury","id":57645283,"node_id":"MDQ6VXNlcjU3NjQ1Mjgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57645283?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariagrandury","html_url":"https:\/\/github.com\/mariagrandury","followers_url":"https:\/\/api.github.com\/users\/mariagrandury\/followers","following_url":"https:\/\/api.github.com\/users\/mariagrandury\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariagrandury\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariagrandury\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariagrandury\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariagrandury\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariagrandury\/repos","events_url":"https:\/\/api.github.com\/users\/mariagrandury\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariagrandury\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-10T12:36:56Z","updated_at":"2020-12-17T17:03:32Z","closed_at":"2020-12-17T16:58:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1455","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1455","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1455.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1455.patch","merged_at":"2020-12-17T16:58:11Z"},"body":"HEAD-QA is a multi-choice HEAlthcare Dataset, the questions come from exams to access a specialized position in the\r\nSpanish healthcare system.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1455\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1455\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1454","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1454\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1454\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1454\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1454","id":761199862,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1OTAxNjk4","number":1454,"title":"Add 
kinnews_kirnews","user":{"login":"saradhix","id":1351362,"node_id":"MDQ6VXNlcjEzNTEzNjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1351362?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/saradhix","html_url":"https:\/\/github.com\/saradhix","followers_url":"https:\/\/api.github.com\/users\/saradhix\/followers","following_url":"https:\/\/api.github.com\/users\/saradhix\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/saradhix\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/saradhix\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/saradhix\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/saradhix\/orgs","repos_url":"https:\/\/api.github.com\/users\/saradhix\/repos","events_url":"https:\/\/api.github.com\/users\/saradhix\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/saradhix\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-10T12:29:08Z","updated_at":"2020-12-17T18:34:16Z","closed_at":"2020-12-17T18:34:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1454","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1454","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1454.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1454.patch","merged_at":"2020-12-17T18:34:16Z"},"body":"Add kinnews and kirnews","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1454\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1454\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1453","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1453\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1453\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1453\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1453","id":761188657,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1ODkyNTM5","number":1453,"title":"Adding ethos dataset 
clean","user":{"login":"iamollas","id":22838900,"node_id":"MDQ6VXNlcjIyODM4OTAw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22838900?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iamollas","html_url":"https:\/\/github.com\/iamollas","followers_url":"https:\/\/api.github.com\/users\/iamollas\/followers","following_url":"https:\/\/api.github.com\/users\/iamollas\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iamollas\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iamollas\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iamollas\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iamollas\/orgs","repos_url":"https:\/\/api.github.com\/users\/iamollas\/repos","events_url":"https:\/\/api.github.com\/users\/iamollas\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iamollas\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-10T12:13:21Z","updated_at":"2020-12-14T15:00:46Z","closed_at":"2020-12-14T10:31:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1453","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1453","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1453.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1453.patch","merged_at":"2020-12-14T10:31:24Z"},"body":"I addressed the comments on the PR1318","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1453\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1453\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1452","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1452\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1452\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1452\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1452","id":761104924,"node_id":"MDU6SXNzdWU3NjExMDQ5MjQ=","number":1452,"title":"SNLI dataset contains labels with value 
-1","user":{"login":"aarnetalman","id":11405654,"node_id":"MDQ6VXNlcjExNDA1NjU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11405654?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aarnetalman","html_url":"https:\/\/github.com\/aarnetalman","followers_url":"https:\/\/api.github.com\/users\/aarnetalman\/followers","following_url":"https:\/\/api.github.com\/users\/aarnetalman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aarnetalman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aarnetalman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aarnetalman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aarnetalman\/orgs","repos_url":"https:\/\/api.github.com\/users\/aarnetalman\/repos","events_url":"https:\/\/api.github.com\/users\/aarnetalman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aarnetalman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-10T10:16:55Z","updated_at":"2020-12-10T17:49:55Z","closed_at":"2020-12-10T17:49:55Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\nimport datasets\r\nnli_data = datasets.load_dataset(\"snli\")\r\ntrain_data = nli_data['train']\r\ntrain_labels = train_data['label']\r\nlabel_set = set(train_labels)\r\nprint(label_set)\r\n```\r\n\r\n**Output:**\r\n`{0, 1, 2, -1}`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1452\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1452\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1451","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1451\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1451\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1451\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1451","id":761102770,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1ODIwOTY3","number":1451,"title":"Add European Center for Disease Control and Preventions's (ECDC) Translation Memory 
dataset","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T10:14:20Z","updated_at":"2020-12-11T16:50:09Z","closed_at":"2020-12-11T16:50:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1451","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1451","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1451.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1451.patch","merged_at":"2020-12-11T16:50:09Z"},"body":"ECDC-TM homepage: https:\/\/ec.europa.eu\/jrc\/en\/language-technologies\/ecdc-translation-memory","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1451\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1451\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1450","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1450\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1450\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1450\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1450","id":761102429,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1ODIwNjg0","number":1450,"title":"Fix version in 
bible_para","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T10:13:55Z","updated_at":"2020-12-11T16:40:41Z","closed_at":"2020-12-11T16:40:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1450","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1450","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1450.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1450.patch","merged_at":"2020-12-11T16:40:40Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1450\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1450\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1449","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1449\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1449\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1449\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1449","id":761083210,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1ODA0MzEy","number":1449,"title":"add W&I + LOCNESS dataset (BEA-2019 workshop shared task on GEC) 
[PROPER]","user":{"login":"aseifert","id":4944799,"node_id":"MDQ6VXNlcjQ5NDQ3OTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4944799?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aseifert","html_url":"https:\/\/github.com\/aseifert","followers_url":"https:\/\/api.github.com\/users\/aseifert\/followers","following_url":"https:\/\/api.github.com\/users\/aseifert\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aseifert\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aseifert\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aseifert\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aseifert\/orgs","repos_url":"https:\/\/api.github.com\/users\/aseifert\/repos","events_url":"https:\/\/api.github.com\/users\/aseifert\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aseifert\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-10T09:51:08Z","updated_at":"2020-12-11T17:07:46Z","closed_at":"2020-12-11T17:07:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1449","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1449","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1449.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1449.patch","merged_at":"2020-12-11T17:07:46Z"},"body":"- **Name:** W&I + LOCNESS dataset (from the BEA-2019 workshop shared task on GEC)\r\n- **Description:** https:\/\/www.cl.cam.ac.uk\/research\/nl\/bea2019st\/#data\r\n- **Paper:** https:\/\/www.aclweb.org\/anthology\/W19-4406\/\r\n- **Motivation:** This is a recent dataset (actually two in one) for grammatical error correction and is used for benchmarking in this field of NLP.\r\n\r\n### Checkbox\r\n\r\n- [x] Create the dataset script `\/datasets\/my_dataset\/my_dataset.py` using the template\r\n- [x] Fill the `_DESCRIPTION` and `_CITATION` variables\r\n- [x] Implement `_infos()`, `_split_generators()` and `_generate_examples()`\r\n- [x] Make sure that the `BUILDER_CONFIGS` class attribute is filled with the different configurations of the dataset and that the `BUILDER_CONFIG_CLASS` is specified if there is a custom config class.\r\n- [x] Generate the metadata file `dataset_infos.json` for all configurations\r\n- [x] Generate the dummy data `dummy_data.zip` files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [x] Add the dataset card `README.md` using the template : fill the tags and the various paragraphs\r\n- [x] Both tests for the real data and the dummy data pass.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1449\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1449\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1448","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1448\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1448\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1448\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1448","id":761080776,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1ODAyNDM3","number":1448,"title":"add thai_toxicity_tweet","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T09:48:02Z","updated_at":"2020-12-11T16:21:27Z","closed_at":"2020-12-11T16:21:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1448","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1448","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1448.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1448.patch","merged_at":"2020-12-11T16:21:27Z"},"body":"Thai Toxicity Tweet Corpus contains 3,300 tweets (506 tweets with texts missing) annotated by humans with guidelines including a 44-word dictionary. The author obtained 2,027 and 1,273 toxic and non-toxic tweets, respectively; these were labeled by three annotators. The result of corpus analysis indicates that tweets that include toxic words are not always toxic. Further, it is more likely that a tweet is toxic, if it contains toxic words indicating their original meaning. Moreover, disagreements in annotation are primarily because of sarcasm, unclear existing target, and word sense ambiguity.\r\n\r\nNotes from data cleaner: The data is included into [huggingface\/datasets](https:\/\/www.github.com\/huggingface\/datasets) in Dec 2020. By this time, 506 of the tweets are not available publicly anymore. 
We denote these by `TWEET_NOT_FOUND` in `tweet_text`.\r\nProcessing can be found at [this PR](https:\/\/github.com\/tmu-nlp\/ThaiToxicityTweetCorpus\/pull\/1).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1448\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1448\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1447","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1447\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1447\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1447\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1447","id":761067955,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NzkxODk1","number":1447,"title":"Update step-by-step guide for windows","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-10T09:30:59Z","updated_at":"2020-12-10T12:18:47Z","closed_at":"2020-12-10T09:31:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1447","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1447","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1447.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1447.patch","merged_at":"2020-12-10T09:31:14Z"},"body":"Update step-by-step guide for windows to give an alternative to `make style`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1447\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1447\/timeline","performed_via_github_app":null} 
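The thai_toxicity_tweet PR above (#1448) notes that tweets no longer available are kept as `TWEET_NOT_FOUND` placeholders in `tweet_text`. A small sketch, assuming that column name and a "train" split, of filtering those placeholders out before use:

```python
# Sketch: dropping the placeholder rows described in the thai_toxicity_tweet
# PR above. Assumes the column is named "tweet_text" and the placeholder
# string is exactly "TWEET_NOT_FOUND", as stated in the dataset notes.
from datasets import load_dataset

tweets = load_dataset("thai_toxicity_tweet", split="train")
available = tweets.filter(lambda ex: ex["tweet_text"] != "TWEET_NOT_FOUND")
print(len(tweets), "->", len(available))  # roughly 3,300 rows minus ~506 placeholders
```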
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1446","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1446\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1446\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1446\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1446","id":761060323,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1Nzg1NDk1","number":1446,"title":"Add Bing Coronavirus Query Set","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T09:20:46Z","updated_at":"2020-12-11T17:03:08Z","closed_at":"2020-12-11T17:03:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1446","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1446","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1446.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1446.patch","merged_at":"2020-12-11T17:03:07Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1446\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1446\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1445","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1445\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1445\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1445\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1445","id":761057851,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NzgzMzY2","number":1445,"title":"Added dataset 
clickbait_news_bg","user":{"login":"tsvm","id":1083319,"node_id":"MDQ6VXNlcjEwODMzMTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1083319?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tsvm","html_url":"https:\/\/github.com\/tsvm","followers_url":"https:\/\/api.github.com\/users\/tsvm\/followers","following_url":"https:\/\/api.github.com\/users\/tsvm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tsvm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tsvm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tsvm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tsvm\/orgs","repos_url":"https:\/\/api.github.com\/users\/tsvm\/repos","events_url":"https:\/\/api.github.com\/users\/tsvm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tsvm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-10T09:17:28Z","updated_at":"2020-12-15T07:45:19Z","closed_at":"2020-12-15T07:45:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1445","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1445","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1445.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1445.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1445\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1445\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1444","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1444\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1444\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1444\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1444","id":761055651,"node_id":"MDU6SXNzdWU3NjEwNTU2NTE=","number":1444,"title":"FileNotFound remotly, can't load a 
dataset","user":{"login":"sadakmed","id":18331629,"node_id":"MDQ6VXNlcjE4MzMxNjI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18331629?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sadakmed","html_url":"https:\/\/github.com\/sadakmed","followers_url":"https:\/\/api.github.com\/users\/sadakmed\/followers","following_url":"https:\/\/api.github.com\/users\/sadakmed\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sadakmed\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sadakmed\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sadakmed\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sadakmed\/orgs","repos_url":"https:\/\/api.github.com\/users\/sadakmed\/repos","events_url":"https:\/\/api.github.com\/users\/sadakmed\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sadakmed\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-10T09:14:47Z","updated_at":"2020-12-15T17:41:14Z","closed_at":"2020-12-15T17:41:14Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```py\r\n!pip install datasets\r\nimport datasets as ds\r\n\r\ncorpus = ds.load_dataset('large_spanish_corpus')\r\n```\r\ngives the error\r\n\r\n> FileNotFoundError: Couldn't find file locally at large_spanish_corpus\/large_spanish_corpus.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/large_spanish_corpus\/large_spanish_corpus.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/large_spanish_corpus\/large_spanish_corpus.py\r\n\r\nnot just `large_spanish_corpus`, `zest` too, but `squad` is available. 
\r\n\r\nthis was using colab and localy ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1444\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1444\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1443","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1443\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1443\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1443\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1443","id":761033061,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NzYyNTQ1","number":1443,"title":"Add OPUS Wikimedia Translations Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T08:43:02Z","updated_at":"2020-12-15T18:21:50Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1443","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1443","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1443.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1443.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1443\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1443\/timeline","performed_via_github_app":null} 
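Issue #1444 above fails because the loading script for large_spanish_corpus is looked up against the pinned 1.1.3 release, where it does not exist yet. A hedged workaround sketch: upgrade the library so the remote lookup targets a release that ships the script, or, on releases that expose it, fetch the script from the master branch via the `script_version` argument. Whether `script_version` is available depends on the installed version, so treat that part as an assumption.

```python
# Sketch of a workaround for the FileNotFoundError in issue 1444: the
# large_spanish_corpus script is simply not present at the 1.1.3 tag,
# so the remote lookup fails.
#
#   pip install -U datasets   # upgrade to a release that includes the script
#
from datasets import load_dataset

corpus = load_dataset("large_spanish_corpus")

# If upgrading is not an option, some older releases accepted a
# script_version argument to resolve the loading script against the
# master branch instead (availability depends on the installed version):
# corpus = load_dataset("large_spanish_corpus", script_version="master")
```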
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1442","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1442\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1442\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1442\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1442","id":761026069,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NzU2Nzgx","number":1442,"title":"Create XML dummy data without loading all dataset in memory","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T08:32:07Z","updated_at":"2020-12-17T09:59:43Z","closed_at":"2020-12-17T09:59:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1442","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1442","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1442.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1442.patch","merged_at":"2020-12-17T09:59:43Z"},"body":"While I was adding one XML dataset, I noticed that all the dataset was loaded in memory during the dummy data generation process (using nearly all my laptop RAM).\r\n\r\nLooking at the code, I have found that the origin is the use of `ET.parse()`. 
This method loads **all the file content in memory**.\r\n\r\nIn order to fix this, I have refactorized the code and use `ET.iterparse()` instead, which **parses the file content incrementally**.\r\n\r\nI have also implemented a test.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1442\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1442\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1441","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1441\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1441\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1441\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1441","id":761021823,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NzUzMjI5","number":1441,"title":"Add Igbo-English Machine Translation Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T08:25:34Z","updated_at":"2020-12-11T15:54:53Z","closed_at":"2020-12-11T15:54:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1441","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1441","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1441.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1441.patch","merged_at":"2020-12-11T15:54:52Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1441\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1441\/timeline","performed_via_github_app":null} 
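For context on the `ET.iterparse()` refactor described in PR #1442 above: `xml.etree.ElementTree.parse()` builds the whole element tree in memory, whereas `ET.iterparse()` yields elements incrementally as the file is read, which is what keeps dummy-data generation from exhausting RAM. The snippet below is only a minimal sketch of that incremental pattern, not the code merged in the PR; the file path and tag name are placeholders.

```python
import xml.etree.ElementTree as ET

def first_n_records(xml_path, tag, n=10):
    """Collect at most n <tag> elements without loading the whole file."""
    records = []
    # iterparse streams (event, element) pairs while the file is being parsed
    for event, elem in ET.iterparse(xml_path, events=("end",)):
        if elem.tag == tag:
            records.append(ET.tostring(elem, encoding="unicode"))
            elem.clear()  # drop the element's children to keep memory usage flat
            if len(records) >= n:
                break
    return records

# Hypothetical usage: first_n_records("dump.xml", "page", n=5)
```

Calling `elem.clear()` after each processed element is the usual companion step; without it the tree keeps growing and the streaming gains are lost.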
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1440","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1440\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1440\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1440\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1440","id":760973057,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NzEyNDY1","number":1440,"title":"Adding english plaintext jokes dataset","user":{"login":"purvimisal","id":22298787,"node_id":"MDQ6VXNlcjIyMjk4Nzg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22298787?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/purvimisal","html_url":"https:\/\/github.com\/purvimisal","followers_url":"https:\/\/api.github.com\/users\/purvimisal\/followers","following_url":"https:\/\/api.github.com\/users\/purvimisal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/purvimisal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/purvimisal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/purvimisal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/purvimisal\/orgs","repos_url":"https:\/\/api.github.com\/users\/purvimisal\/repos","events_url":"https:\/\/api.github.com\/users\/purvimisal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/purvimisal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-10T07:04:17Z","updated_at":"2020-12-13T05:22:00Z","closed_at":"2020-12-12T05:55:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1440","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1440","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1440.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1440.patch","merged_at":null},"body":"This PR adds a dataset of 200k English plaintext Jokes from three sources: Reddit, Stupidstuff, and Wocka.\r\nLink: https:\/\/github.com\/taivop\/joke-dataset \r\n\r\nThis is my second PR. 
\r\nFirst was: [#1269 ](https:\/\/github.com\/huggingface\/datasets\/pull\/1269)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1440\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1440\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1439","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1439\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1439\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1439\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1439","id":760968410,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NzA4NDU1","number":1439,"title":"Update README.md","user":{"login":"tuner007","id":46425391,"node_id":"MDQ6VXNlcjQ2NDI1Mzkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46425391?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tuner007","html_url":"https:\/\/github.com\/tuner007","followers_url":"https:\/\/api.github.com\/users\/tuner007\/followers","following_url":"https:\/\/api.github.com\/users\/tuner007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tuner007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tuner007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tuner007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tuner007\/orgs","repos_url":"https:\/\/api.github.com\/users\/tuner007\/repos","events_url":"https:\/\/api.github.com\/users\/tuner007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tuner007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T06:57:01Z","updated_at":"2020-12-11T15:22:53Z","closed_at":"2020-12-11T15:22:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1439","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1439","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1439.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1439.patch","merged_at":"2020-12-11T15:22:53Z"},"body":"1k-10k -> 1k-1M\r\n\r\n3 separate configs are available with min. 1K and max. 
211.3k examples","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1439\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1439\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1438","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1438\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1438\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1438\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1438","id":760962193,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NzAzMTEw","number":1438,"title":"A descriptive name for my changes","user":{"login":"rahul-art","id":56379013,"node_id":"MDQ6VXNlcjU2Mzc5MDEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56379013?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rahul-art","html_url":"https:\/\/github.com\/rahul-art","followers_url":"https:\/\/api.github.com\/users\/rahul-art\/followers","following_url":"https:\/\/api.github.com\/users\/rahul-art\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rahul-art\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rahul-art\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rahul-art\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rahul-art\/orgs","repos_url":"https:\/\/api.github.com\/users\/rahul-art\/repos","events_url":"https:\/\/api.github.com\/users\/rahul-art\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rahul-art\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-10T06:47:24Z","updated_at":"2020-12-15T10:36:27Z","closed_at":"2020-12-15T10:36:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1438","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1438","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1438.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1438.patch","merged_at":null},"body":"hind encorp resubmited","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1438\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1438\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1437","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1437\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1437\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1437\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1437","id":760891879,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NjQwODE0","number":1437,"title":"Add Indosum 
dataset","user":{"login":"prasastoadi","id":11614678,"node_id":"MDQ6VXNlcjExNjE0Njc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11614678?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/prasastoadi","html_url":"https:\/\/github.com\/prasastoadi","followers_url":"https:\/\/api.github.com\/users\/prasastoadi\/followers","following_url":"https:\/\/api.github.com\/users\/prasastoadi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/prasastoadi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/prasastoadi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/prasastoadi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/prasastoadi\/orgs","repos_url":"https:\/\/api.github.com\/users\/prasastoadi\/repos","events_url":"https:\/\/api.github.com\/users\/prasastoadi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/prasastoadi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-10T05:02:00Z","updated_at":"2020-12-17T09:51:19Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1437","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1437","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1437.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1437.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1437\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1437\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1436","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1436\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1436\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1436\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1436","id":760873132,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NjI1MDM0","number":1436,"title":"add 
ALT","user":{"login":"chameleonTK","id":6429850,"node_id":"MDQ6VXNlcjY0Mjk4NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6429850?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chameleonTK","html_url":"https:\/\/github.com\/chameleonTK","followers_url":"https:\/\/api.github.com\/users\/chameleonTK\/followers","following_url":"https:\/\/api.github.com\/users\/chameleonTK\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chameleonTK\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chameleonTK\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chameleonTK\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chameleonTK\/orgs","repos_url":"https:\/\/api.github.com\/users\/chameleonTK\/repos","events_url":"https:\/\/api.github.com\/users\/chameleonTK\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chameleonTK\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-10T04:17:21Z","updated_at":"2020-12-13T16:14:18Z","closed_at":"2020-12-11T15:52:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1436","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1436","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1436.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1436.patch","merged_at":"2020-12-11T15:52:41Z"},"body":"ALT dataset -- https:\/\/www2.nict.go.jp\/astrec-att\/member\/mutiyama\/ALT\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1436\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1436\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1435","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1435\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1435\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1435\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1435","id":760867325,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NjIwODE4","number":1435,"title":"Add FreebaseQA 
dataset","user":{"login":"anaerobeth","id":3663322,"node_id":"MDQ6VXNlcjM2NjMzMjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3663322?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anaerobeth","html_url":"https:\/\/github.com\/anaerobeth","followers_url":"https:\/\/api.github.com\/users\/anaerobeth\/followers","following_url":"https:\/\/api.github.com\/users\/anaerobeth\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anaerobeth\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anaerobeth\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anaerobeth\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anaerobeth\/orgs","repos_url":"https:\/\/api.github.com\/users\/anaerobeth\/repos","events_url":"https:\/\/api.github.com\/users\/anaerobeth\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anaerobeth\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2020-12-10T04:03:27Z","updated_at":"2021-02-05T09:47:30Z","closed_at":"2021-02-05T09:47:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1435","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1435","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1435.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1435.patch","merged_at":null},"body":"This PR adds the FreebaseQA dataset: A Trivia-type QA Data Set over the Freebase Knowledge Graph\r\n\r\nRepo: https:\/\/github.com\/kelvin-jiang\/FreebaseQA\r\n\r\nPaper: https:\/\/www.aclweb.org\/anthology\/N19-1028.pdf\r\n\r\n\r\n## TODO: create dummy data\r\n\r\nError encountered when running `python datasets-cli dummy_data datasets\/freebase_qa --auto_generate`\r\n```\r\n f\"Couldn't parse columns {list(json_data.keys())}. \"\r\nValueError: Couldn't parse columns ['Dataset', 'Version', 'Questions']. 
Maybe specify which json field must be used to read the data with --json_field .\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1435\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1435\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1434","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1434\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1434\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1434\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1434","id":760821474,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NTg3NjEx","number":1434,"title":"add_sofc_materials_articles","user":{"login":"ZacharySBrown","id":7950786,"node_id":"MDQ6VXNlcjc5NTA3ODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7950786?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZacharySBrown","html_url":"https:\/\/github.com\/ZacharySBrown","followers_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/followers","following_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/repos","events_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-10T02:15:02Z","updated_at":"2020-12-17T09:59:54Z","closed_at":"2020-12-17T09:59:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1434","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1434","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1434.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1434.patch","merged_at":"2020-12-17T09:59:54Z"},"body":"adding [SOFC-Exp Corpus](https:\/\/arxiv.org\/abs\/2006.03039)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1434\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1434\/timeline","performed_via_github_app":null} 
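On the dummy-data TODO in PR #1435 above: the quoted `ValueError` itself points at the next step, namely telling `datasets-cli` which top-level JSON key holds the examples via `--json_field`. Assuming the records sit under the `Questions` key (an assumption based on the listed columns, not confirmed in the PR), the retried command would look roughly like:

```
python datasets-cli dummy_data datasets/freebase_qa --auto_generate --json_field Questions
```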
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1433","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1433\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1433\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1433\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1433","id":760813539,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NTgxNzE3","number":1433,"title":"Adding the ASSIN 2 dataset","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-10T01:57:02Z","updated_at":"2020-12-11T14:32:56Z","closed_at":"2020-12-11T14:32:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1433","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1433","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1433.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1433.patch","merged_at":"2020-12-11T14:32:56Z"},"body":"Adding the ASSIN 2 dataset, a Portuguese language dataset for Natural Language Inference and Semantic Similarity Scoring","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1433\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1433\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1432","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1432\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1432\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1432\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1432","id":760808449,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NTc3ODk3","number":1432,"title":"Adding journalists questions 
dataset","user":{"login":"MaramHasanain","id":3918663,"node_id":"MDQ6VXNlcjM5MTg2NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3918663?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MaramHasanain","html_url":"https:\/\/github.com\/MaramHasanain","followers_url":"https:\/\/api.github.com\/users\/MaramHasanain\/followers","following_url":"https:\/\/api.github.com\/users\/MaramHasanain\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MaramHasanain\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MaramHasanain\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MaramHasanain\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MaramHasanain\/orgs","repos_url":"https:\/\/api.github.com\/users\/MaramHasanain\/repos","events_url":"https:\/\/api.github.com\/users\/MaramHasanain\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MaramHasanain\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-10T01:44:47Z","updated_at":"2020-12-14T13:51:05Z","closed_at":"2020-12-14T13:51:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1432","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1432","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1432.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1432.patch","merged_at":"2020-12-14T13:51:04Z"},"body":"This is my first dataset to be added to HF. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1432\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1432\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1431","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1431\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1431\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1431\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1431","id":760791019,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NTYzOTk1","number":1431,"title":"Ar 
cov19","user":{"login":"Fatima-Haouari","id":71061623,"node_id":"MDQ6VXNlcjcxMDYxNjIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/71061623?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Fatima-Haouari","html_url":"https:\/\/github.com\/Fatima-Haouari","followers_url":"https:\/\/api.github.com\/users\/Fatima-Haouari\/followers","following_url":"https:\/\/api.github.com\/users\/Fatima-Haouari\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Fatima-Haouari\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Fatima-Haouari\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Fatima-Haouari\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Fatima-Haouari\/orgs","repos_url":"https:\/\/api.github.com\/users\/Fatima-Haouari\/repos","events_url":"https:\/\/api.github.com\/users\/Fatima-Haouari\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Fatima-Haouari\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-10T00:59:34Z","updated_at":"2020-12-11T15:01:23Z","closed_at":"2020-12-11T15:01:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1431","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1431","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1431.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1431.patch","merged_at":"2020-12-11T15:01:23Z"},"body":"Adding ArCOV-19 dataset. ArCOV-19 is an Arabic COVID-19 Twitter dataset that covers the period from 27th of January till 30th of April 2020. ArCOV-19 is the first publicly-available Arabic Twitter dataset covering COVID-19 pandemic that includes over 1M tweets alongside the propagation networks of the most-popular subset of them (i.e., most-retweeted and-liked). The propagation networks include both retweets and conversational threads (i.e., threads of replies). ArCOV-19 is designed to enable research under several domains including natural language processing, information retrieval, and social computing, among others. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1431\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1431\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1430","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1430\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1430\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1430\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1430","id":760779666,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NTU0Njg0","number":1430,"title":"Add 1.5 billion words Arabic corpus ","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-12-10T00:32:18Z","updated_at":"2020-12-22T10:03:59Z","closed_at":"2020-12-22T10:03:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1430","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1430","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1430.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1430.patch","merged_at":"2020-12-22T10:03:59Z"},"body":"Needs https:\/\/github.com\/huggingface\/datasets\/pull\/1429 to work. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1430\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1430\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1429","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1429\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1429\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1429\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1429","id":760737818,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NTE5MjY5","number":1429,"title":"extract rar files","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T23:01:10Z","updated_at":"2020-12-18T15:03:37Z","closed_at":"2020-12-18T15:03:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1429","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1429","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1429.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1429.patch","merged_at":"2020-12-18T15:03:37Z"},"body":"Unfortunately, I didn't find any native python libraries for extracting rar files. The user has to manually install `sudo apt-get install unrar`. Discussion with @yjernite is in the slack channel. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1429\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1429\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1428","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1428\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1428\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1428\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1428","id":760736726,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NTE4MzIy","number":1428,"title":"Add twi wordsim353","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T22:59:19Z","updated_at":"2020-12-11T13:57:32Z","closed_at":"2020-12-11T13:57:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1428","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1428","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1428.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1428.patch","merged_at":"2020-12-11T13:57:32Z"},"body":"Add twi WordSim 353","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1428\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1428\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1427","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1427\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1427\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1427\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1427","id":760736703,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NTE4MzAx","number":1427,"title":"Hebrew project 
BenYehuda","user":{"login":"imvladikon","id":10088963,"node_id":"MDQ6VXNlcjEwMDg4OTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10088963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/imvladikon","html_url":"https:\/\/github.com\/imvladikon","followers_url":"https:\/\/api.github.com\/users\/imvladikon\/followers","following_url":"https:\/\/api.github.com\/users\/imvladikon\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/imvladikon\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/imvladikon\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/imvladikon\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/imvladikon\/orgs","repos_url":"https:\/\/api.github.com\/users\/imvladikon\/repos","events_url":"https:\/\/api.github.com\/users\/imvladikon\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/imvladikon\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T22:59:17Z","updated_at":"2020-12-11T17:39:23Z","closed_at":"2020-12-11T17:39:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1427","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1427","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1427.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1427.patch","merged_at":"2020-12-11T17:39:23Z"},"body":"Added Hebrew corpus from https:\/\/github.com\/projectbenyehuda\/public_domain_dump","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1427\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1427\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1426","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1426\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1426\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1426\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1426","id":760735763,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NTE3NDc4","number":1426,"title":"init commit for MultiReQA for third PR with all issues 
fixed","user":{"login":"Karthik-Bhaskar","id":13200370,"node_id":"MDQ6VXNlcjEzMjAwMzcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13200370?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar","html_url":"https:\/\/github.com\/Karthik-Bhaskar","followers_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/followers","following_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/orgs","repos_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/repos","events_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T22:57:41Z","updated_at":"2020-12-11T13:37:08Z","closed_at":"2020-12-11T13:37:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1426","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1426","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1426.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1426.patch","merged_at":"2020-12-11T13:37:08Z"},"body":"3rd PR w.r.t. PR #1349 with all the issues fixed. As #1349 had uploaded other files along with the multi_re_qa dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1426\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1426\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1425","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1425\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1425\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1425\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1425","id":760733638,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NTE1NjQz","number":1425,"title":"Add german common crawl 
dataset","user":{"login":"Phil1108","id":39518904,"node_id":"MDQ6VXNlcjM5NTE4OTA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39518904?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Phil1108","html_url":"https:\/\/github.com\/Phil1108","followers_url":"https:\/\/api.github.com\/users\/Phil1108\/followers","following_url":"https:\/\/api.github.com\/users\/Phil1108\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Phil1108\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Phil1108\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Phil1108\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Phil1108\/orgs","repos_url":"https:\/\/api.github.com\/users\/Phil1108\/repos","events_url":"https:\/\/api.github.com\/users\/Phil1108\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Phil1108\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-09T22:54:12Z","updated_at":"2020-12-23T09:59:47Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1425","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1425","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1425.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1425.patch","merged_at":null},"body":"Adding a subpart of the Common Crawl which was extracted with this repo https:\/\/github.com\/facebookresearch\/cc_net and additionally filtered for duplicates ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1425\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1425\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1424","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1424\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1424\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1424\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1424","id":760724914,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NTA4MjY5","number":1424,"title":"Add yoruba 
wordsim353","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T22:37:42Z","updated_at":"2020-12-09T22:39:45Z","closed_at":"2020-12-09T22:39:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1424","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1424","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1424.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1424.patch","merged_at":null},"body":"Added WordSim-353 evaluation dataset for Yoruba","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1424\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1424\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1423","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1423\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1423\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1423\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1423","id":760712421,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDk3OTk5","number":1423,"title":"Imppres","user":{"login":"aclifton314","id":53267795,"node_id":"MDQ6VXNlcjUzMjY3Nzk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53267795?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aclifton314","html_url":"https:\/\/github.com\/aclifton314","followers_url":"https:\/\/api.github.com\/users\/aclifton314\/followers","following_url":"https:\/\/api.github.com\/users\/aclifton314\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aclifton314\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aclifton314\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aclifton314\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aclifton314\/orgs","repos_url":"https:\/\/api.github.com\/users\/aclifton314\/repos","events_url":"https:\/\/api.github.com\/users\/aclifton314\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aclifton314\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-12-09T22:14:12Z","updated_at":"2020-12-17T18:27:14Z","closed_at":"2020-12-17T18:27:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1423","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1423","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1423.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1423.patch","merged_at":"2020-12-17T18:27:14Z"},"body":"2nd PR ever! Hopefully I'm starting to get the hang of this. This is for the IMPPRES dataset. 
Please let me know of any corrections or changes that need to be made.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1423\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":2,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1423\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1422","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1422\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1422\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1422\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1422","id":760707113,"node_id":"MDU6SXNzdWU3NjA3MDcxMTM=","number":1422,"title":"Can't map dataset (loaded from csv)","user":{"login":"SolomidHero","id":28161779,"node_id":"MDQ6VXNlcjI4MTYxNzc5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28161779?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SolomidHero","html_url":"https:\/\/github.com\/SolomidHero","followers_url":"https:\/\/api.github.com\/users\/SolomidHero\/followers","following_url":"https:\/\/api.github.com\/users\/SolomidHero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SolomidHero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SolomidHero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SolomidHero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SolomidHero\/orgs","repos_url":"https:\/\/api.github.com\/users\/SolomidHero\/repos","events_url":"https:\/\/api.github.com\/users\/SolomidHero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SolomidHero\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T22:05:42Z","updated_at":"2020-12-17T18:13:40Z","closed_at":"2020-12-17T18:13:40Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello! I am trying to load single csv file with two columns: ('label': str, 'text' str), where is label is str of two possible classes.\r\n\r\nBelow steps are similar with [this notebook](https:\/\/colab.research.google.com\/drive\/1-JIJlao4dI-Ilww_NnTc0rxtp-ymgDgM?usp=sharing), where bert model and tokenizer are used to classify lmdb loaded dataset. 
Only one difference it is the dataset loaded from .csv file.\r\nHere is how I load it:\r\n\r\n```python\r\ndata_path = 'data.csv'\r\ndata = pd.read_csv(data_path)\r\n\r\n# process class name to indices\r\nclasses = ['neg', 'pos']\r\nclass_to_idx = { cl: i for i, cl in enumerate(classes) }\r\n\r\n# now data is like {'label': int, 'text' str}\r\ndata['label'] = data['label'].apply(lambda x: class_to_idx[x])\r\n\r\n# load dataset and map it with defined `tokenize` function\r\nfeatures = Features({\r\n target: ClassLabel(num_classes=2, names=['neg', 'pos'], names_file=None, id=None),\r\n feature: Value(dtype='string', id=None),\r\n})\r\ndataset = Dataset.from_pandas(data, features=features)\r\ndataset.map(tokenize, batched=True, batch_size=len(dataset))\r\n```\r\n\r\nIt ruins on the last line with following error:\r\n```\r\n---------------------------------------------------------------------------\r\nAssertionError Traceback (most recent call last)\r\n in ()\r\n 9 })\r\n 10 dataset = Dataset.from_pandas(data, features=features)\r\n---> 11 dataset.map(tokenizer, batched=True, batch_size=len(dataset))\r\n\r\n2 frames\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint)\r\n 1237 test_inputs = self[:2] if batched else self[0]\r\n 1238 test_indices = [0, 1] if batched else 0\r\n-> 1239 update_data = does_function_return_dict(test_inputs, test_indices)\r\n 1240 logger.info(\"Testing finished, running the mapping function on the dataset\")\r\n 1241 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_dataset.py in does_function_return_dict(inputs, indices)\r\n 1208 fn_args = [inputs] if input_columns is None else [inputs[col] for col in input_columns]\r\n 1209 processed_inputs = (\r\n-> 1210 function(*fn_args, indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n 1211 )\r\n 1212 does_return_dict = isinstance(processed_inputs, Mapping)\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/transformers\/tokenization_utils_base.py in __call__(self, text, text_pair, add_special_tokens, padding, truncation, max_length, stride, is_split_into_words, pad_to_multiple_of, return_tensors, return_token_type_ids, return_attention_mask, return_overflowing_tokens, return_special_tokens_mask, return_offsets_mapping, return_length, verbose, **kwargs)\r\n 2281 )\r\n 2282 ), (\r\n-> 2283 \"text input must of type `str` (single example), `List[str]` (batch or single pretokenized example) \"\r\n 2284 \"or `List[List[str]]` (batch of pretokenized examples).\"\r\n 2285 )\r\n\r\nAssertionError: text input must of type `str` (single example), `List[str]` (batch or single pretokenized example) or `List[List[str]]` (batch of pretokenized examples).\r\n```\r\n\r\nwhich I think is not expected. 
I also tried the same steps using `Dataset.from_csv` which resulted in the same error.\r\n\r\nFor reproducing this, I used [this dataset from kaggle](https:\/\/www.kaggle.com\/team-ai\/spam-text-message-classification)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1422\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1422\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1421","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1421\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1421\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1421\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1421","id":760706851,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDkzMzU4","number":1421,"title":"adding fake-news-english-2","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T22:05:13Z","updated_at":"2020-12-13T00:48:49Z","closed_at":"2020-12-13T00:48:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1421","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1421","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1421.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1421.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1421\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1421\/timeline","performed_via_github_app":null} 
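Note on the `map` error reported in issue #1422 above: the quoted traceback shows the tokenizer object being passed directly to `Dataset.map`, so with `batched=True` it receives a dict of columns rather than the `str` / `List[str]` input the assertion message asks for. Below is a minimal sketch of a mapping function that selects the text column first; the checkpoint name (`bert-base-uncased`), the column names, and the toy rows are illustrative assumptions, not taken from the issue.

```python
# Minimal sketch: tokenize a CSV-derived Dataset with a wrapper function.
import pandas as pd
from datasets import ClassLabel, Dataset, Features, Value
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")  # assumed checkpoint

# Toy data standing in for data.csv (assumption, for a runnable example)
data = pd.DataFrame({"label": ["neg", "pos"], "text": ["bad movie", "great movie"]})
classes = ["neg", "pos"]
class_to_idx = {cl: i for i, cl in enumerate(classes)}
data["label"] = data["label"].apply(lambda x: class_to_idx[x])

features = Features({
    "label": ClassLabel(num_classes=2, names=classes),
    "text": Value("string"),
})
dataset = Dataset.from_pandas(data, features=features)

def tokenize(batch):
    # With batched=True, `batch` is a dict of lists; pass only the text column
    # (a list of strings) to the tokenizer instead of the whole dict.
    return tokenizer(batch["text"], padding=True, truncation=True)

dataset = dataset.map(tokenize, batched=True, batch_size=len(dataset))
print(dataset.column_names)  # e.g. ['label', 'text', 'input_ids', ...]
```

Wrapping the tokenizer so that only `batch["text"]` reaches it appears to satisfy the input-type requirement stated in the assertion message.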
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1420","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1420\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1420\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1420\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1420","id":760700388,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDg4MTM5","number":1420,"title":"Add dataset yoruba_wordsim353","user":{"login":"michael-aloys","id":1858628,"node_id":"MDQ6VXNlcjE4NTg2Mjg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1858628?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/michael-aloys","html_url":"https:\/\/github.com\/michael-aloys","followers_url":"https:\/\/api.github.com\/users\/michael-aloys\/followers","following_url":"https:\/\/api.github.com\/users\/michael-aloys\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/michael-aloys\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/michael-aloys\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/michael-aloys\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/michael-aloys\/orgs","repos_url":"https:\/\/api.github.com\/users\/michael-aloys\/repos","events_url":"https:\/\/api.github.com\/users\/michael-aloys\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/michael-aloys\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T21:54:29Z","updated_at":"2020-12-11T13:34:04Z","closed_at":"2020-12-11T13:34:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1420","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1420","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1420.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1420.patch","merged_at":"2020-12-11T13:34:04Z"},"body":"Contains loading script as well as dataset card including YAML tags.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1420\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1420\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1419","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1419\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1419\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1419\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1419","id":760673716,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDY1OTA4","number":1419,"title":"Add Turkish News Category Dataset 
(270K)","user":{"login":"basakbuluz","id":41359672,"node_id":"MDQ6VXNlcjQxMzU5Njcy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/41359672?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/basakbuluz","html_url":"https:\/\/github.com\/basakbuluz","followers_url":"https:\/\/api.github.com\/users\/basakbuluz\/followers","following_url":"https:\/\/api.github.com\/users\/basakbuluz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/basakbuluz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/basakbuluz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/basakbuluz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/basakbuluz\/orgs","repos_url":"https:\/\/api.github.com\/users\/basakbuluz\/repos","events_url":"https:\/\/api.github.com\/users\/basakbuluz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/basakbuluz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-09T21:08:33Z","updated_at":"2020-12-11T14:02:31Z","closed_at":"2020-12-11T14:02:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1419","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1419","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1419.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1419.patch","merged_at":null},"body":"This PR adds the Turkish News Categories Dataset (270K) dataset which is a text classification dataset by me and @yavuzKomecoglu. Turkish news dataset consisting of **273601 news** in **17 categories**, compiled from printed media and news websites between 2010 and 2017 by the [Interpress](https:\/\/www.interpress.com\/) media monitoring company.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1419\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1419\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1418","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1418\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1418\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1418\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1418","id":760672320,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDY0NzQ4","number":1418,"title":"Add arabic 
dialects","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T21:06:07Z","updated_at":"2020-12-17T09:40:56Z","closed_at":"2020-12-17T09:40:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1418","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1418","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1418.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1418.patch","merged_at":"2020-12-17T09:40:56Z"},"body":"Data loading script and dataset card for Dialectal Arabic Resources dataset. \r\nFixed git issues from PR #976","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1418\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1418\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1417","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1417\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1417\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1417\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1417","id":760660918,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDU1NzM3","number":1417,"title":"WIP: Vinay\/add peer read 
dataset","user":{"login":"vinaykudari","id":34424769,"node_id":"MDQ6VXNlcjM0NDI0NzY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34424769?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vinaykudari","html_url":"https:\/\/github.com\/vinaykudari","followers_url":"https:\/\/api.github.com\/users\/vinaykudari\/followers","following_url":"https:\/\/api.github.com\/users\/vinaykudari\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vinaykudari\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vinaykudari\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vinaykudari\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vinaykudari\/orgs","repos_url":"https:\/\/api.github.com\/users\/vinaykudari\/repos","events_url":"https:\/\/api.github.com\/users\/vinaykudari\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vinaykudari\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T20:49:52Z","updated_at":"2020-12-11T18:43:31Z","closed_at":"2020-12-11T18:43:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1417","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1417","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1417.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1417.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1417\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1417\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1416","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1416\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1416\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1416\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1416","id":760653971,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDUwMTIz","number":1416,"title":"Add Shrinked Turkish NER from 
Kaggle.","user":{"login":"bhctsntrk","id":22636672,"node_id":"MDQ6VXNlcjIyNjM2Njcy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22636672?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhctsntrk","html_url":"https:\/\/github.com\/bhctsntrk","followers_url":"https:\/\/api.github.com\/users\/bhctsntrk\/followers","following_url":"https:\/\/api.github.com\/users\/bhctsntrk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhctsntrk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhctsntrk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhctsntrk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhctsntrk\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhctsntrk\/repos","events_url":"https:\/\/api.github.com\/users\/bhctsntrk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhctsntrk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T20:38:35Z","updated_at":"2020-12-11T11:23:31Z","closed_at":"2020-12-11T11:23:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1416","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1416","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1416.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1416.patch","merged_at":"2020-12-11T11:23:31Z"},"body":"Add Shrinked Turkish NER from [Kaggle](https:\/\/www.kaggle.com\/behcetsenturk\/shrinked-twnertc-turkish-ner-data-by-kuzgunlar).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1416\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1416\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1415","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1415\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1415\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1415\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1415","id":760642786,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDQxMTQx","number":1415,"title":"Add Hate Speech and Offensive Language Detection 
dataset","user":{"login":"hugoabonizio","id":1206395,"node_id":"MDQ6VXNlcjEyMDYzOTU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1206395?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hugoabonizio","html_url":"https:\/\/github.com\/hugoabonizio","followers_url":"https:\/\/api.github.com\/users\/hugoabonizio\/followers","following_url":"https:\/\/api.github.com\/users\/hugoabonizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hugoabonizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hugoabonizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hugoabonizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hugoabonizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/hugoabonizio\/repos","events_url":"https:\/\/api.github.com\/users\/hugoabonizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hugoabonizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-09T20:22:12Z","updated_at":"2020-12-14T18:06:44Z","closed_at":"2020-12-14T16:25:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1415","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1415","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1415.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1415.patch","merged_at":"2020-12-14T16:25:31Z"},"body":"Add [Hate Speech and Offensive Language Detection dataset](https:\/\/github.com\/t-davidson\/hate-speech-and-offensive-language) from [this paper](https:\/\/arxiv.org\/abs\/1703.04009).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1415\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1415\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1414","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1414\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1414\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1414\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1414","id":760622133,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDIzODgy","number":1414,"title":"Adding BioCreative II Gene Mention 
corpus","user":{"login":"mahajandiwakar","id":10516432,"node_id":"MDQ6VXNlcjEwNTE2NDMy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10516432?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mahajandiwakar","html_url":"https:\/\/github.com\/mahajandiwakar","followers_url":"https:\/\/api.github.com\/users\/mahajandiwakar\/followers","following_url":"https:\/\/api.github.com\/users\/mahajandiwakar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mahajandiwakar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mahajandiwakar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mahajandiwakar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mahajandiwakar\/orgs","repos_url":"https:\/\/api.github.com\/users\/mahajandiwakar\/repos","events_url":"https:\/\/api.github.com\/users\/mahajandiwakar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mahajandiwakar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T19:49:28Z","updated_at":"2020-12-11T11:17:40Z","closed_at":"2020-12-11T11:17:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1414","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1414","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1414.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1414.patch","merged_at":"2020-12-11T11:17:40Z"},"body":"Adding BioCreative II Gene Mention corpus","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1414\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1414\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1413","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1413\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1413\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1413\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1413","id":760615090,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDE4MDY2","number":1413,"title":"Add 
OffComBR","user":{"login":"hugoabonizio","id":1206395,"node_id":"MDQ6VXNlcjEyMDYzOTU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1206395?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hugoabonizio","html_url":"https:\/\/github.com\/hugoabonizio","followers_url":"https:\/\/api.github.com\/users\/hugoabonizio\/followers","following_url":"https:\/\/api.github.com\/users\/hugoabonizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hugoabonizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hugoabonizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hugoabonizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hugoabonizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/hugoabonizio\/repos","events_url":"https:\/\/api.github.com\/users\/hugoabonizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hugoabonizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-09T19:38:08Z","updated_at":"2020-12-14T18:06:45Z","closed_at":"2020-12-14T16:51:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1413","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1413","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1413.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1413.patch","merged_at":"2020-12-14T16:51:10Z"},"body":"Add [OffComBR](https:\/\/github.com\/rogersdepelle\/OffComBR) from [Offensive Comments in the Brazilian Web: a dataset and baseline results](https:\/\/sol.sbc.org.br\/index.php\/brasnam\/article\/view\/3260\/3222) paper.\r\n\r\nBut I'm having a hard time generating dummy data since the original dataset extion is `.arff` and the [_create_dummy_data function](https:\/\/github.com\/huggingface\/datasets\/blob\/a4aeaf911240057286a01bff1b1d75a89aedd57b\/src\/datasets\/commands\/dummy_data.py#L185) doesn't allow it.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1413\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1413\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1412","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1412\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1412\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1412\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1412","id":760607959,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDEyMDg2","number":1412,"title":"Adding the ASSIN 
dataset","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T19:27:06Z","updated_at":"2020-12-11T10:41:10Z","closed_at":"2020-12-11T10:41:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1412","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1412","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1412.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1412.patch","merged_at":"2020-12-11T10:41:10Z"},"body":"Adding the ASSIN dataset, a Portuguese language dataset for Natural Language Inference and Semantic Similarity Scoring","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1412\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1412\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1411","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1411\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1411\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1411\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1411","id":760606290,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDEwNjU3","number":1411,"title":"2 
typos","user":{"login":"dezow","id":47401160,"node_id":"MDQ6VXNlcjQ3NDAxMTYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47401160?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dezow","html_url":"https:\/\/github.com\/dezow","followers_url":"https:\/\/api.github.com\/users\/dezow\/followers","following_url":"https:\/\/api.github.com\/users\/dezow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dezow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dezow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dezow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dezow\/orgs","repos_url":"https:\/\/api.github.com\/users\/dezow\/repos","events_url":"https:\/\/api.github.com\/users\/dezow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dezow\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T19:24:34Z","updated_at":"2020-12-11T10:39:05Z","closed_at":"2020-12-11T10:39:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1411","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1411","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1411.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1411.patch","merged_at":"2020-12-11T10:39:05Z"},"body":"Corrected 2 typos","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1411\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1411\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1410","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1410\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1410\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1410\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1410","id":760597092,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1NDAyNjcw","number":1410,"title":"Add penn treebank 
dataset","user":{"login":"harshalmittal4","id":24206326,"node_id":"MDQ6VXNlcjI0MjA2MzI2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24206326?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/harshalmittal4","html_url":"https:\/\/github.com\/harshalmittal4","followers_url":"https:\/\/api.github.com\/users\/harshalmittal4\/followers","following_url":"https:\/\/api.github.com\/users\/harshalmittal4\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/harshalmittal4\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/harshalmittal4\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/harshalmittal4\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/harshalmittal4\/orgs","repos_url":"https:\/\/api.github.com\/users\/harshalmittal4\/repos","events_url":"https:\/\/api.github.com\/users\/harshalmittal4\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/harshalmittal4\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T19:11:33Z","updated_at":"2020-12-16T09:38:23Z","closed_at":"2020-12-16T09:38:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1410","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1410","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1410.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1410.patch","merged_at":"2020-12-16T09:38:23Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1410\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1410\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1409","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1409\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1409\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1409\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1409","id":760593932,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1Mzk5OTI1","number":1409,"title":"Adding the ASSIN 
dataset","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T19:07:00Z","updated_at":"2020-12-09T19:18:12Z","closed_at":"2020-12-09T19:15:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1409","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1409","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1409.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1409.patch","merged_at":null},"body":"Adding the ASSIN dataset, a Portuguese language dataset for Natural Language Inference and Semantic Similarity Scoring","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1409\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1409\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1408","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1408\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1408\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1408\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1408","id":760590589,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1Mzk3MTAw","number":1408,"title":"adding 
fake-news-english","user":{"login":"MisbahKhan789","id":15351802,"node_id":"MDQ6VXNlcjE1MzUxODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15351802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MisbahKhan789","html_url":"https:\/\/github.com\/MisbahKhan789","followers_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/followers","following_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/orgs","repos_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/repos","events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MisbahKhan789\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T19:02:07Z","updated_at":"2020-12-13T00:49:19Z","closed_at":"2020-12-13T00:49:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1408","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1408","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1408.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1408.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1408\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1408\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1407","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1407\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1407\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1407\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1407","id":760581756,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1Mzg5ODQx","number":1407,"title":"Add Tweet Eval 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-09T18:48:57Z","updated_at":"2021-02-26T08:54:04Z","closed_at":"2021-02-26T08:54:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1407","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1407","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1407.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1407.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1407\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1407\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1406","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1406\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1406\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1406\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1406","id":760581330,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1Mzg5NDk5","number":1406,"title":"Add Portuguese Hate Speech 
dataset","user":{"login":"hugoabonizio","id":1206395,"node_id":"MDQ6VXNlcjEyMDYzOTU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1206395?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hugoabonizio","html_url":"https:\/\/github.com\/hugoabonizio","followers_url":"https:\/\/api.github.com\/users\/hugoabonizio\/followers","following_url":"https:\/\/api.github.com\/users\/hugoabonizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hugoabonizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hugoabonizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hugoabonizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hugoabonizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/hugoabonizio\/repos","events_url":"https:\/\/api.github.com\/users\/hugoabonizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hugoabonizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T18:48:16Z","updated_at":"2020-12-14T18:06:42Z","closed_at":"2020-12-14T16:22:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1406","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1406","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1406.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1406.patch","merged_at":"2020-12-14T16:22:20Z"},"body":"Binary Portuguese Hate Speech dataset from [this paper](https:\/\/www.aclweb.org\/anthology\/W19-3510\/).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1406\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1406\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1405","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1405\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1405\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1405\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1405","id":760578035,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1Mzg2ODA1","number":1405,"title":"Adding TaPaCo Dataset with 
README.md","user":{"login":"pacman100","id":13534540,"node_id":"MDQ6VXNlcjEzNTM0NTQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13534540?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pacman100","html_url":"https:\/\/github.com\/pacman100","followers_url":"https:\/\/api.github.com\/users\/pacman100\/followers","following_url":"https:\/\/api.github.com\/users\/pacman100\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pacman100\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pacman100\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pacman100\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pacman100\/orgs","repos_url":"https:\/\/api.github.com\/users\/pacman100\/repos","events_url":"https:\/\/api.github.com\/users\/pacman100\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pacman100\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T18:42:58Z","updated_at":"2020-12-13T19:11:18Z","closed_at":"2020-12-13T19:11:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1405","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1405","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1405.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1405.patch","merged_at":"2020-12-13T19:11:18Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1405\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1405\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1404","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1404\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1404\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1404\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1404","id":760575473,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1Mzg0NzEz","number":1404,"title":"Add Acronym Identification 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T18:38:54Z","updated_at":"2020-12-14T13:12:01Z","closed_at":"2020-12-14T13:12:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1404","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1404","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1404.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1404.patch","merged_at":"2020-12-14T13:12:00Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1404\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1404\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1403","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1403\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1403\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1403\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1403","id":760571419,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MzgxMzQ3","number":1403,"title":"Add dataset 
clickbait_news_bg","user":{"login":"tsvm","id":1083319,"node_id":"MDQ6VXNlcjEwODMzMTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1083319?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tsvm","html_url":"https:\/\/github.com\/tsvm","followers_url":"https:\/\/api.github.com\/users\/tsvm\/followers","following_url":"https:\/\/api.github.com\/users\/tsvm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tsvm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tsvm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tsvm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tsvm\/orgs","repos_url":"https:\/\/api.github.com\/users\/tsvm\/repos","events_url":"https:\/\/api.github.com\/users\/tsvm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tsvm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T18:32:12Z","updated_at":"2020-12-10T09:16:44Z","closed_at":"2020-12-10T09:16:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1403","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1403","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1403.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1403.patch","merged_at":null},"body":"Adding a new dataset - clickbait_news_bg","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1403\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1403\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1402","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1402\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1402\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1402\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1402","id":760538325,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MzUzMzE0","number":1402,"title":"adding covid-tweets-japanese 
(again)","user":{"login":"forest1988","id":2755894,"node_id":"MDQ6VXNlcjI3NTU4OTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2755894?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/forest1988","html_url":"https:\/\/github.com\/forest1988","followers_url":"https:\/\/api.github.com\/users\/forest1988\/followers","following_url":"https:\/\/api.github.com\/users\/forest1988\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/forest1988\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/forest1988\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/forest1988\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/forest1988\/orgs","repos_url":"https:\/\/api.github.com\/users\/forest1988\/repos","events_url":"https:\/\/api.github.com\/users\/forest1988\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/forest1988\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-09T17:46:46Z","updated_at":"2020-12-13T17:54:14Z","closed_at":"2020-12-13T17:47:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1402","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1402","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1402.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1402.patch","merged_at":"2020-12-13T17:47:36Z"},"body":"I had mistaken use git rebase, I was so hurried to fix it. However, I didn't fully consider the use of git reset , so I unintendedly stopped PR (#1367) altogether. 
Sorry about that.\r\nI'll make a new PR.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1402\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1402\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1401","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1401\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1401\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1401\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1401","id":760525949,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MzQyOTY2","number":1401,"title":"Add reasoning_bg","user":{"login":"saradhix","id":1351362,"node_id":"MDQ6VXNlcjEzNTEzNjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1351362?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/saradhix","html_url":"https:\/\/github.com\/saradhix","followers_url":"https:\/\/api.github.com\/users\/saradhix\/followers","following_url":"https:\/\/api.github.com\/users\/saradhix\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/saradhix\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/saradhix\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/saradhix\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/saradhix\/orgs","repos_url":"https:\/\/api.github.com\/users\/saradhix\/repos","events_url":"https:\/\/api.github.com\/users\/saradhix\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/saradhix\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-09T17:30:49Z","updated_at":"2020-12-17T16:50:43Z","closed_at":"2020-12-17T16:50:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1401","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1401","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1401.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1401.patch","merged_at":"2020-12-17T16:50:42Z"},"body":"Adding reading comprehension dataset for Bulgarian language","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1401\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1401\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1400","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1400\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1400\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1400\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1400","id":760514215,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MzMzMDYz","number":1400,"title":"Add European Union Education and Culture Translation Memory (EAC-TM) dataset","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T17:14:52Z","updated_at":"2020-12-14T13:06:48Z","closed_at":"2020-12-14T13:06:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1400","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1400","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1400.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1400.patch","merged_at":"2020-12-14T13:06:47Z"},"body":"Adding the EAC Translation Memory dataset : https:\/\/ec.europa.eu\/jrc\/en\/language-technologies\/eac-translation-memory","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1400\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1400\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1399","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1399\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1399\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1399\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1399","id":760499576,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MzIwNzA2","number":1399,"title":"Add HoVer 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T16:55:39Z","updated_at":"2020-12-14T10:57:23Z","closed_at":"2020-12-14T10:57:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1399","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1399","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1399.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1399.patch","merged_at":"2020-12-14T10:57:22Z"},"body":"HoVer: A Dataset for Many-Hop Fact Extraction And Claim Verification\r\nhttps:\/\/arxiv.org\/abs\/2011.03088 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1399\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1399\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1398","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1398\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1398\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1398\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1398","id":760497024,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MzE4NTg5","number":1398,"title":"Add Neural Code Search 
Dataset","user":{"login":"vinaykudari","id":34424769,"node_id":"MDQ6VXNlcjM0NDI0NzY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34424769?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vinaykudari","html_url":"https:\/\/github.com\/vinaykudari","followers_url":"https:\/\/api.github.com\/users\/vinaykudari\/followers","following_url":"https:\/\/api.github.com\/users\/vinaykudari\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vinaykudari\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vinaykudari\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vinaykudari\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vinaykudari\/orgs","repos_url":"https:\/\/api.github.com\/users\/vinaykudari\/repos","events_url":"https:\/\/api.github.com\/users\/vinaykudari\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vinaykudari\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-09T16:52:16Z","updated_at":"2020-12-09T18:02:27Z","closed_at":"2020-12-09T18:02:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1398","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1398","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1398.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1398.patch","merged_at":"2020-12-09T18:02:27Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1398\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1398\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1397","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1397\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1397\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1397\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1397","id":760467501,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1Mjk0MDgz","number":1397,"title":"datasets card-creator link 
added","user":{"login":"tanmoyio","id":33005287,"node_id":"MDQ6VXNlcjMzMDA1Mjg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33005287?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tanmoyio","html_url":"https:\/\/github.com\/tanmoyio","followers_url":"https:\/\/api.github.com\/users\/tanmoyio\/followers","following_url":"https:\/\/api.github.com\/users\/tanmoyio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tanmoyio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tanmoyio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tanmoyio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tanmoyio\/orgs","repos_url":"https:\/\/api.github.com\/users\/tanmoyio\/repos","events_url":"https:\/\/api.github.com\/users\/tanmoyio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tanmoyio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T16:15:18Z","updated_at":"2020-12-09T16:47:48Z","closed_at":"2020-12-09T16:47:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1397","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1397","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1397.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1397.patch","merged_at":null},"body":"dataset card creator link has been added \r\nlink: https:\/\/huggingface.co\/datasets\/card-creator\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1397\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1397\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1396","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1396\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1396\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1396\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1396","id":760455295,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MjgzOTAw","number":1396,"title":"initial commit for MultiReQA for second 
PR","user":{"login":"Karthik-Bhaskar","id":13200370,"node_id":"MDQ6VXNlcjEzMjAwMzcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13200370?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar","html_url":"https:\/\/github.com\/Karthik-Bhaskar","followers_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/followers","following_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/orgs","repos_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/repos","events_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T16:00:35Z","updated_at":"2020-12-10T18:20:12Z","closed_at":"2020-12-10T18:20:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1396","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1396","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1396.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1396.patch","merged_at":null},"body":"Since last PR #1349 had some issues passing the tests. So, a new PR is generated.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1396\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1396\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1395","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1395\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1395\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1395\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1395","id":760448255,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1Mjc4MTQ2","number":1395,"title":"Add WikiSource 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T15:52:06Z","updated_at":"2020-12-14T10:24:14Z","closed_at":"2020-12-14T10:24:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1395","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1395","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1395.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1395.patch","merged_at":"2020-12-14T10:24:13Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1395\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1395\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1394","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1394\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1394\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1394\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1394","id":760436365,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MjY4MzMz","number":1394,"title":"Add OfisPublik 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T15:37:45Z","updated_at":"2020-12-14T10:23:30Z","closed_at":"2020-12-14T10:23:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1394","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1394","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1394.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1394.patch","merged_at":"2020-12-14T10:23:29Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1394\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1394\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1393","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1393\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1393\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1393\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1393","id":760436267,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MjY4MjUx","number":1393,"title":"Add script_version suggestion when dataset\/metric not 
found","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T15:37:38Z","updated_at":"2020-12-10T18:17:05Z","closed_at":"2020-12-10T18:17:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1393","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1393","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1393.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1393.patch","merged_at":"2020-12-10T18:17:05Z"},"body":"Adds a helpful prompt to the error message when a dataset\/metric is not found, suggesting the user might need to pass `script_version=\"master\"` if the dataset was added recently. 
The whole error looks like:\r\n\r\n> Couldn't find file locally at blah\/blah.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1\/metrics\/blah\/blah.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/met\r\nrics\/blah\/blah.py.\r\nIf the dataset was added recently, you may need to to pass script_version=\"master\" to find the loading script on the master branch.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1393\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1393\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1392","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1392\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1392\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1392\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1392","id":760432261,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MjY0ODQ5","number":1392,"title":"Add KDE4 Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T15:32:58Z","updated_at":"2020-12-14T10:22:33Z","closed_at":"2020-12-14T10:22:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1392","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1392","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1392.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1392.patch","merged_at":"2020-12-14T10:22:32Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1392\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1392\/timeline","performed_via_github_app":null} 
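The script_version suggestion described in pull request #1393 above applies when a just-added dataset or metric script is not yet part of a pip release. Below is a minimal sketch of the suggested call, using the same placeholder name ("blah") as the quoted error message; script_version was the keyword accepted by load_dataset/load_metric in datasets 1.x, and later releases renamed it, so the exact keyword depends on the installed version.

from datasets import load_dataset

# Fetch the loading script from the master branch instead of the latest
# pip release, as suggested by the error message added in PR #1393.
# "blah" is the placeholder dataset name taken from the quoted error text.
dataset = load_dataset("blah", script_version="master")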
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1391","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1391\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1391\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1391\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1391","id":760432041,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MjY0NjUx","number":1391,"title":"Add MultiParaCrawl Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T15:32:46Z","updated_at":"2020-12-10T18:39:45Z","closed_at":"2020-12-10T18:39:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1391","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1391","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1391.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1391.patch","merged_at":"2020-12-10T18:39:44Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1391\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1391\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1390","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1390\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1390\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1390\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1390","id":760431051,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MjYzNzk1","number":1390,"title":"Add SPC 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T15:31:51Z","updated_at":"2020-12-14T11:13:53Z","closed_at":"2020-12-14T11:13:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1390","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1390","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1390.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1390.patch","merged_at":"2020-12-14T11:13:52Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1390\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1390\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1389","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1389\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1389\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1389\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1389","id":760402224,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MjM5OTYy","number":1389,"title":"add amazon polarity 
dataset","user":{"login":"hfawaz","id":29229602,"node_id":"MDQ6VXNlcjI5MjI5NjAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29229602?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hfawaz","html_url":"https:\/\/github.com\/hfawaz","followers_url":"https:\/\/api.github.com\/users\/hfawaz\/followers","following_url":"https:\/\/api.github.com\/users\/hfawaz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hfawaz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hfawaz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hfawaz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hfawaz\/orgs","repos_url":"https:\/\/api.github.com\/users\/hfawaz\/repos","events_url":"https:\/\/api.github.com\/users\/hfawaz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hfawaz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-09T14:58:21Z","updated_at":"2020-12-11T11:45:39Z","closed_at":"2020-12-11T11:41:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1389","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1389","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1389.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1389.patch","merged_at":"2020-12-11T11:41:01Z"},"body":"This corresponds to the amazon (binary dataset) requested in https:\/\/github.com\/huggingface\/datasets\/issues\/353","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1389\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1389\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1388","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1388\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1388\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1388\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1388","id":760373136,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MjE1Nzk2","number":1388,"title":"hind_encorp","user":{"login":"rahul-art","id":56379013,"node_id":"MDQ6VXNlcjU2Mzc5MDEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56379013?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rahul-art","html_url":"https:\/\/github.com\/rahul-art","followers_url":"https:\/\/api.github.com\/users\/rahul-art\/followers","following_url":"https:\/\/api.github.com\/users\/rahul-art\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rahul-art\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rahul-art\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rahul-art\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rahul-art\/orgs","repos_url":"https:\/\/api.github.com\/users\/rahul-art\/repos","events_url":"https:\/\/api.github.com\/users\/rahul-art\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rahul-art\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T14:22:59Z","updated_at":"2020-12-09T14:46:51Z","closed_at":"2020-12-09T14:46:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1388","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1388","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1388.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1388.patch","merged_at":null},"body":"resubmit of hind_encorp file changes","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1388\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1388\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1387","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1387\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1387\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1387\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1387","id":760368355,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MjExODQ1","number":1387,"title":"Add LIAR 
dataset","user":{"login":"hugoabonizio","id":1206395,"node_id":"MDQ6VXNlcjEyMDYzOTU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1206395?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hugoabonizio","html_url":"https:\/\/github.com\/hugoabonizio","followers_url":"https:\/\/api.github.com\/users\/hugoabonizio\/followers","following_url":"https:\/\/api.github.com\/users\/hugoabonizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hugoabonizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hugoabonizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hugoabonizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hugoabonizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/hugoabonizio\/repos","events_url":"https:\/\/api.github.com\/users\/hugoabonizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hugoabonizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T14:16:55Z","updated_at":"2020-12-14T18:06:43Z","closed_at":"2020-12-14T16:23:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1387","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1387","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1387.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1387.patch","merged_at":"2020-12-14T16:23:59Z"},"body":"Add LIAR dataset from [\u201cLiar, Liar Pants on Fire\u201d: A New Benchmark Dataset for Fake News Detection](https:\/\/www.aclweb.org\/anthology\/P17-2067\/).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1387\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1387\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1386","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1386\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1386\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1386\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1386","id":760365505,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MjA5NDUx","number":1386,"title":"Add RecipeNLG Dataset (manual 
download)","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T14:13:19Z","updated_at":"2020-12-10T16:58:22Z","closed_at":"2020-12-10T16:58:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1386","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1386","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1386.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1386.patch","merged_at":"2020-12-10T16:58:21Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1386\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1386\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1385","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1385\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1385\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1385\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1385","id":760351405,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTk3Nzk5","number":1385,"title":"add 
best2009","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T13:56:09Z","updated_at":"2020-12-14T10:59:08Z","closed_at":"2020-12-14T10:59:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1385","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1385","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1385.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1385.patch","merged_at":"2020-12-14T10:59:08Z"},"body":"`best2009` is a Thai word-tokenization dataset from encyclopedia, novels, news and articles by [NECTEC](https:\/\/www.nectec.or.th\/) (148,995\/2,252 lines of train\/test). It was created for [BEST 2010: Word Tokenization Competition](https:\/\/thailang.nectec.or.th\/archive\/indexa290.html?q=node\/10). 
The test set answers are not provided publicly.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1385\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1385\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1384","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1384\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1384\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1384\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1384","id":760331767,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTgxMjg1","number":1384,"title":"Add News Commentary Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T13:30:36Z","updated_at":"2020-12-10T16:54:08Z","closed_at":"2020-12-10T16:54:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1384","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1384","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1384.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1384.patch","merged_at":"2020-12-10T16:54:07Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1384\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1384\/timeline","performed_via_github_app":null} 
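A minimal sketch of inspecting the best2009 corpus added in pull request #1385 above, assuming the dataset is published under the name "best2009"; the printed split sizes should roughly correspond to the 148,995/2,252 train/test lines quoted in that PR description.

from datasets import load_dataset

# Load the Thai word-tokenization corpus and report the size of each split.
best2009 = load_dataset("best2009")
print({split: best2009[split].num_rows for split in best2009})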
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1383","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1383\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1383\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1383\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1383","id":760331480,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTgxMDQ2","number":1383,"title":"added conv ai 2","user":{"login":"rkc007","id":22396042,"node_id":"MDQ6VXNlcjIyMzk2MDQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22396042?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rkc007","html_url":"https:\/\/github.com\/rkc007","followers_url":"https:\/\/api.github.com\/users\/rkc007\/followers","following_url":"https:\/\/api.github.com\/users\/rkc007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rkc007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rkc007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rkc007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rkc007\/orgs","repos_url":"https:\/\/api.github.com\/users\/rkc007\/repos","events_url":"https:\/\/api.github.com\/users\/rkc007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rkc007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T13:30:12Z","updated_at":"2020-12-13T18:54:42Z","closed_at":"2020-12-13T18:54:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1383","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1383","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1383.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1383.patch","merged_at":null},"body":"Dataset : https:\/\/github.com\/DeepPavlov\/convai\/tree\/master\/2018","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1383\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1383\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1382","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1382\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1382\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1382\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1382","id":760325077,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTc1NzMx","number":1382,"title":"adding 
UNPC","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T13:21:41Z","updated_at":"2020-12-09T17:53:06Z","closed_at":"2020-12-09T17:53:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1382","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1382","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1382.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1382.patch","merged_at":"2020-12-09T17:53:06Z"},"body":"Adding United Nations Parallel Corpus\r\nhttp:\/\/opus.nlpl.eu\/UNPC.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1382\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1382\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1381","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1381\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1381\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1381\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1381","id":760320960,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTcyMjkw","number":1381,"title":"Add twi text 
c3","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-12-09T13:16:38Z","updated_at":"2020-12-13T18:39:27Z","closed_at":"2020-12-13T18:39:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1381","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1381","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1381.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1381.patch","merged_at":null},"body":"Added Twi texts for training embeddings and language models based on the paper https:\/\/www.aclweb.org\/anthology\/2020.lrec-1.335\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1381\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1381\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1380","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1380\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1380\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1380\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1380","id":760320494,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTcxOTAw","number":1380,"title":"Add Tatoeba 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T13:16:04Z","updated_at":"2020-12-10T16:54:28Z","closed_at":"2020-12-10T16:54:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1380","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1380","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1380.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1380.patch","merged_at":"2020-12-10T16:54:27Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1380\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1380\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1379","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1379\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1379\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1379\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1379","id":760320487,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTcxODk0","number":1379,"title":"Add yoruba text 
c3","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2020-12-09T13:16:03Z","updated_at":"2020-12-13T18:45:12Z","closed_at":"2020-12-13T18:37:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1379","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1379","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1379.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1379.patch","merged_at":"2020-12-13T18:37:32Z"},"body":"Added Yoruba texts for training embeddings and language models based on the paper https:\/\/www.aclweb.org\/anthology\/2020.lrec-1.335\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1379\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1379\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1378","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1378\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1378\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1378\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1378","id":760313108,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTY1OTE3","number":1378,"title":"Add FACTCK.BR 
dataset","user":{"login":"hugoabonizio","id":1206395,"node_id":"MDQ6VXNlcjEyMDYzOTU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1206395?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hugoabonizio","html_url":"https:\/\/github.com\/hugoabonizio","followers_url":"https:\/\/api.github.com\/users\/hugoabonizio\/followers","following_url":"https:\/\/api.github.com\/users\/hugoabonizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hugoabonizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hugoabonizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hugoabonizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hugoabonizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/hugoabonizio\/repos","events_url":"https:\/\/api.github.com\/users\/hugoabonizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hugoabonizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T13:06:22Z","updated_at":"2020-12-17T12:38:45Z","closed_at":"2020-12-15T15:34:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1378","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1378","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1378.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1378.patch","merged_at":"2020-12-15T15:34:11Z"},"body":"This PR adds [FACTCK.BR](https:\/\/github.com\/jghm-f\/FACTCK.BR) dataset from [FACTCK.BR: a new dataset to study fake news](https:\/\/dl.acm.org\/doi\/10.1145\/3323503.3361698).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1378\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1378\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1377","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1377\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1377\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1377\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1377","id":760309435,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTYyOTcz","number":1377,"title":"adding marathi-wiki 
dataset","user":{"login":"ekdnam","id":40426312,"node_id":"MDQ6VXNlcjQwNDI2MzEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40426312?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ekdnam","html_url":"https:\/\/github.com\/ekdnam","followers_url":"https:\/\/api.github.com\/users\/ekdnam\/followers","following_url":"https:\/\/api.github.com\/users\/ekdnam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ekdnam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ekdnam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ekdnam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ekdnam\/orgs","repos_url":"https:\/\/api.github.com\/users\/ekdnam\/repos","events_url":"https:\/\/api.github.com\/users\/ekdnam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ekdnam\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T13:01:20Z","updated_at":"2020-12-11T12:46:37Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1377","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1377","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1377.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1377.patch","merged_at":null},"body":"Adding marathi-wiki-articles dataset. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1377\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1377\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1376","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1376\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1376\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1376\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1376","id":760309300,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTYyODU4","number":1376,"title":"Add SETimes 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T13:01:08Z","updated_at":"2020-12-10T16:11:57Z","closed_at":"2020-12-10T16:11:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1376","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1376","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1376.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1376.patch","merged_at":"2020-12-10T16:11:56Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1376\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1376\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1375","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1375\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1375\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1375\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1375","id":760294931,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTUwOTk2","number":1375,"title":"Add OPUS EMEA 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T12:39:44Z","updated_at":"2020-12-10T16:11:09Z","closed_at":"2020-12-10T16:11:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1375","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1375","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1375.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1375.patch","merged_at":"2020-12-10T16:11:08Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1375\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1375\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1374","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1374\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1374\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1374\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1374","id":760288291,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTQ1Mzgw","number":1374,"title":"Add OPUS Tilde Model 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T12:29:23Z","updated_at":"2020-12-10T16:11:29Z","closed_at":"2020-12-10T16:11:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1374","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1374","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1374.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1374.patch","merged_at":"2020-12-10T16:11:28Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1374\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1374\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1373","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1373\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1373\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1373\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1373","id":760280869,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTM5MTY0","number":1373,"title":"Add OPUS ECB 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T12:18:22Z","updated_at":"2020-12-10T15:25:55Z","closed_at":"2020-12-10T15:25:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1373","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1373","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1373.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1373.patch","merged_at":"2020-12-10T15:25:54Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1373\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1373\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1372","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1372\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1372\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1372\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1372","id":760274046,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTMzMzQ4","number":1372,"title":"Add OPUS Books 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T12:08:49Z","updated_at":"2020-12-14T09:56:28Z","closed_at":"2020-12-14T09:56:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1372","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1372","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1372.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1372.patch","merged_at":"2020-12-14T09:56:27Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1372\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1372\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1371","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1371\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1371\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1371\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1371","id":760270116,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTMwMTQ1","number":1371,"title":"Adding 
Scielo","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T12:02:48Z","updated_at":"2020-12-09T17:53:37Z","closed_at":"2020-12-09T17:53:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1371","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1371","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1371.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1371.patch","merged_at":"2020-12-09T17:53:37Z"},"body":"Adding Scielo: Parallel corpus of full-text articles in Portuguese, English and Spanish from SciELO\r\nhttps:\/\/sites.google.com\/view\/felipe-soares\/datasets#h.p_92uSCyAjWSRB","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1371\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1371\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1370","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1370\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1370\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1370\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1370","id":760264132,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MTI1MTc3","number":1370,"title":"Add OPUS PHP 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T11:53:30Z","updated_at":"2020-12-10T15:37:25Z","closed_at":"2020-12-10T15:37:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1370","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1370","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1370.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1370.patch","merged_at":"2020-12-10T15:37:24Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1370\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1370\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1369","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1369\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1369\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1369\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1369","id":760227776,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MDk0NDk1","number":1369,"title":"Use passed --cache_dir for modules 
cache","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-12-09T10:59:59Z","updated_at":"2021-04-23T10:34:07Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1369","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1369","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1369.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1369.patch","merged_at":null},"body":"When passed `--cache_dir` arg:\r\n```shell\r\npython datasets-cli test datasets\/ --save_infos --all_configs --cache_dir \r\n```\r\nit is not used for caching the modules, which are cached in the default location at `.cache\/huggingface\/modules`.\r\n\r\nWith this fix, the modules will be cached at `\/modules`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1369\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1369\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1368","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1368\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1368\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1368\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1368","id":760222616,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MDkwMjM0","number":1368,"title":"Re-adding narrativeqa 
dataset","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-09T10:53:09Z","updated_at":"2020-12-11T13:30:59Z","closed_at":"2020-12-11T13:30:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1368","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1368","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1368.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1368.patch","merged_at":null},"body":"An update of #309. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1368\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1368\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1367","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1367\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1367\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1367\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1367","id":760208191,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MDc4MTAx","number":1367,"title":"adding 
covid-tweets-japanese","user":{"login":"forest1988","id":2755894,"node_id":"MDQ6VXNlcjI3NTU4OTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2755894?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/forest1988","html_url":"https:\/\/github.com\/forest1988","followers_url":"https:\/\/api.github.com\/users\/forest1988\/followers","following_url":"https:\/\/api.github.com\/users\/forest1988\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/forest1988\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/forest1988\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/forest1988\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/forest1988\/orgs","repos_url":"https:\/\/api.github.com\/users\/forest1988\/repos","events_url":"https:\/\/api.github.com\/users\/forest1988\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/forest1988\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T10:34:01Z","updated_at":"2020-12-09T17:25:14Z","closed_at":"2020-12-09T17:25:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1367","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1367","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1367.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1367.patch","merged_at":null},"body":"Adding COVID-19 Japanese Tweets Dataset as part of the sprint.\r\n\r\nTesting with dummy data is not working (the file is said to not exist). 
Sorry for the incomplete PR.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1367\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1367\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1366","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1366\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1366\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1366\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1366","id":760205506,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MDc1ODU2","number":1366,"title":"Adding Hope EDI dataset","user":{"login":"jamespaultg","id":7421838,"node_id":"MDQ6VXNlcjc0MjE4Mzg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7421838?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jamespaultg","html_url":"https:\/\/github.com\/jamespaultg","followers_url":"https:\/\/api.github.com\/users\/jamespaultg\/followers","following_url":"https:\/\/api.github.com\/users\/jamespaultg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jamespaultg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jamespaultg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jamespaultg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jamespaultg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jamespaultg\/repos","events_url":"https:\/\/api.github.com\/users\/jamespaultg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jamespaultg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T10:30:23Z","updated_at":"2020-12-14T14:27:57Z","closed_at":"2020-12-14T14:27:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1366","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1366","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1366.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1366.patch","merged_at":"2020-12-14T14:27:57Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1366\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1366\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1365","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1365\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1365\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1365\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1365","id":760188457,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MDYxNTI2","number":1365,"title":"Add Mkqa 
dataset","user":{"login":"cceyda","id":15624271,"node_id":"MDQ6VXNlcjE1NjI0Mjcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15624271?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cceyda","html_url":"https:\/\/github.com\/cceyda","followers_url":"https:\/\/api.github.com\/users\/cceyda\/followers","following_url":"https:\/\/api.github.com\/users\/cceyda\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cceyda\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cceyda\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cceyda\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cceyda\/orgs","repos_url":"https:\/\/api.github.com\/users\/cceyda\/repos","events_url":"https:\/\/api.github.com\/users\/cceyda\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cceyda\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T10:06:33Z","updated_at":"2020-12-10T15:37:56Z","closed_at":"2020-12-10T15:37:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1365","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1365","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1365.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1365.patch","merged_at":"2020-12-10T15:37:56Z"},"body":"# MKQA: Multilingual Knowledge Questions & Answers Dataset\r\nAdding the [MKQA](https:\/\/github.com\/apple\/ml-mkqa) dataset as part of the sprint \ud83c\udf89\r\n\r\nThere is no official data splits so I added just a `train` split.\r\n \r\ndifferently from the original:\r\n- answer:type field is a ClassLabel (I thought it might be possible to train on this as a label for categorizing questions)\r\n- answer:entity field has a default value of empty string '' (since this key is not available for all in original)\r\n- answer:alias has default value of []\r\n\r\n- [x] All tests passed\r\n- [x] Added dummy data\r\n- [x] Added data card (as much as I could)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1365\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1365\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1364","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1364\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1364\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1364\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1364","id":760164558,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MDQxNjUz","number":1364,"title":"Narrative QA (Manual Download Stories) 
Dataset","user":{"login":"rsanjaykamath","id":18527321,"node_id":"MDQ6VXNlcjE4NTI3MzIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18527321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rsanjaykamath","html_url":"https:\/\/github.com\/rsanjaykamath","followers_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/followers","following_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/orgs","repos_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/repos","events_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-09T09:33:59Z","updated_at":"2021-01-25T15:31:51Z","closed_at":"2021-01-25T15:31:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1364","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1364","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1364.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1364.patch","merged_at":null},"body":"Narrative QA with manual download for stories. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1364\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1364\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1363","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1363\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1363\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1363\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1363","id":760160944,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MDM4NjM0","number":1363,"title":"Adding OPUS 
MultiUN","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T09:29:01Z","updated_at":"2020-12-09T17:54:20Z","closed_at":"2020-12-09T17:54:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1363","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1363","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1363.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1363.patch","merged_at":"2020-12-09T17:54:19Z"},"body":"Adding UnMulti\r\nhttp:\/\/www.euromatrixplus.net\/multi-un\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1363\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1363\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1362","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1362\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1362\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1362\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1362","id":760138233,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM1MDIwMDAz","number":1362,"title":"adding 
opus_infopankki","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-09T08:57:10Z","updated_at":"2020-12-09T18:16:20Z","closed_at":"2020-12-09T18:13:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1362","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1362","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1362.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1362.patch","merged_at":"2020-12-09T18:13:48Z"},"body":"Adding opus_infopankki\r\nhttp:\/\/opus.nlpl.eu\/infopankki-v1.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1362\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1362\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1361","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1361\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1361\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1361\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1361","id":760101728,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0OTg5Nzcy","number":1361,"title":"adding 
bprec","user":{"login":"kldarek","id":15803781,"node_id":"MDQ6VXNlcjE1ODAzNzgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15803781?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kldarek","html_url":"https:\/\/github.com\/kldarek","followers_url":"https:\/\/api.github.com\/users\/kldarek\/followers","following_url":"https:\/\/api.github.com\/users\/kldarek\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kldarek\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kldarek\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kldarek\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kldarek\/orgs","repos_url":"https:\/\/api.github.com\/users\/kldarek\/repos","events_url":"https:\/\/api.github.com\/users\/kldarek\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kldarek\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-09T08:02:45Z","updated_at":"2020-12-16T17:04:44Z","closed_at":"2020-12-16T17:04:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1361","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1361","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1361.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1361.patch","merged_at":"2020-12-16T17:04:44Z"},"body":"Brand-Product Relation Extraction Corpora in Polish","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1361\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1361\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1360","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1360\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1360\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1360\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1360","id":760088419,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0OTc4NzM0","number":1360,"title":"add 
wisesight1000","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T07:41:30Z","updated_at":"2020-12-10T14:28:41Z","closed_at":"2020-12-10T14:28:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1360","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1360","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1360.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1360.patch","merged_at":"2020-12-10T14:28:41Z"},"body":"`wisesight1000` contains Thai social media texts randomly drawn from the full `wisesight-sentiment`, tokenized by human annotators. Out of the labels `neg` (negative), `neu` (neutral), `pos` (positive), `q` (question), 250 samples each. 
Some texts are removed because they look like spam. Because these samples are representative of real world content, we believe having these annotated samples will allow the community to robustly evaluate tokenization algorithms.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1360\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1360\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1359","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1359\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1359\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1359\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1359","id":760055969,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0OTUxMTgy","number":1359,"title":"Add JNLPBA","user":{"login":"edugp","id":17855740,"node_id":"MDQ6VXNlcjE3ODU1NzQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17855740?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/edugp","html_url":"https:\/\/github.com\/edugp","followers_url":"https:\/\/api.github.com\/users\/edugp\/followers","following_url":"https:\/\/api.github.com\/users\/edugp\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/edugp\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/edugp\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/edugp\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/edugp\/orgs","repos_url":"https:\/\/api.github.com\/users\/edugp\/repos","events_url":"https:\/\/api.github.com\/users\/edugp\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/edugp\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T06:48:51Z","updated_at":"2020-12-10T14:24:36Z","closed_at":"2020-12-10T14:24:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1359","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1359","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1359.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1359.patch","merged_at":"2020-12-10T14:24:36Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1359\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1359\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1358","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1358\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1358\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1358\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1358","id":760031131,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0OTI5ODIx","number":1358,"title":"Add spider dataset","user":{"login":"olinguyen","id":4341867,"node_id":"MDQ6VXNlcjQzNDE4Njc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4341867?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/olinguyen","html_url":"https:\/\/github.com\/olinguyen","followers_url":"https:\/\/api.github.com\/users\/olinguyen\/followers","following_url":"https:\/\/api.github.com\/users\/olinguyen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/olinguyen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/olinguyen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/olinguyen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/olinguyen\/orgs","repos_url":"https:\/\/api.github.com\/users\/olinguyen\/repos","events_url":"https:\/\/api.github.com\/users\/olinguyen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/olinguyen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T06:06:18Z","updated_at":"2020-12-10T15:12:31Z","closed_at":"2020-12-10T15:12:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1358","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1358","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1358.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1358.patch","merged_at":"2020-12-10T15:12:31Z"},"body":"This PR adds the Spider dataset, a large-scale complex and cross-domain semantic parsing and text-to-SQL dataset annotated by 11 Yale students. 
The goal of the Spider challenge is to develop natural language interfaces to cross-domain databases.\r\n\r\nDataset website: https:\/\/yale-lily.github.io\/spider\r\nPaper link: https:\/\/www.aclweb.org\/anthology\/D18-1425\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1358\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1358\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1357","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1357\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1357\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1357\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1357","id":760023525,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0OTIzMzA4","number":1357,"title":"Youtube caption corrections","user":{"login":"2dot71mily","id":21292059,"node_id":"MDQ6VXNlcjIxMjkyMDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21292059?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/2dot71mily","html_url":"https:\/\/github.com\/2dot71mily","followers_url":"https:\/\/api.github.com\/users\/2dot71mily\/followers","following_url":"https:\/\/api.github.com\/users\/2dot71mily\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/2dot71mily\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/2dot71mily\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/2dot71mily\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/2dot71mily\/orgs","repos_url":"https:\/\/api.github.com\/users\/2dot71mily\/repos","events_url":"https:\/\/api.github.com\/users\/2dot71mily\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/2dot71mily\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-12-09T05:52:34Z","updated_at":"2020-12-15T18:12:56Z","closed_at":"2020-12-15T18:12:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1357","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1357","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1357.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1357.patch","merged_at":"2020-12-15T18:12:56Z"},"body":"This PR adds a new dataset of YouTube captions, error and corrections. 
This dataset was created in just the last week, as inspired by this sprint!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1357\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1357\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1356","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1356\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1356\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1356\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1356","id":759994457,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0ODk3OTQ1","number":1356,"title":"Add StackOverflow StackSample dataset","user":{"login":"ncoop57","id":7613470,"node_id":"MDQ6VXNlcjc2MTM0NzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7613470?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ncoop57","html_url":"https:\/\/github.com\/ncoop57","followers_url":"https:\/\/api.github.com\/users\/ncoop57\/followers","following_url":"https:\/\/api.github.com\/users\/ncoop57\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ncoop57\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ncoop57\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ncoop57\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ncoop57\/orgs","repos_url":"https:\/\/api.github.com\/users\/ncoop57\/repos","events_url":"https:\/\/api.github.com\/users\/ncoop57\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ncoop57\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-09T04:59:51Z","updated_at":"2020-12-21T14:48:21Z","closed_at":"2020-12-21T14:48:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1356","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1356","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1356.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1356.patch","merged_at":"2020-12-21T14:48:21Z"},"body":"This PR adds the StackOverflow StackSample dataset from Kaggle: https:\/\/www.kaggle.com\/stackoverflow\/stacksample\r\n\r\nRan through all of the steps. 
However, since my dataset requires manually downloading the data, I was unable to run the pytest on the real dataset (the dummy data pytest passed).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1356\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1356\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1355","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1355\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1355\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1355\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1355","id":759994208,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0ODk3NzQw","number":1355,"title":"Addition of py_ast dataset","user":{"login":"reshinthadithyan","id":36307201,"node_id":"MDQ6VXNlcjM2MzA3MjAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36307201?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/reshinthadithyan","html_url":"https:\/\/github.com\/reshinthadithyan","followers_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/followers","following_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/repos","events_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T04:59:17Z","updated_at":"2020-12-09T16:19:49Z","closed_at":"2020-12-09T16:19:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1355","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1355","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1355.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1355.patch","merged_at":"2020-12-09T16:19:48Z"},"body":"@lhoestq as discussed in PR #1195 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1355\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1355\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1354","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1354\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1354\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1354\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1354","id":759987763,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0ODkyMzE2","number":1354,"title":"Add TweetQA dataset","user":{"login":"anaerobeth","id":3663322,"node_id":"MDQ6VXNlcjM2NjMzMjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3663322?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anaerobeth","html_url":"https:\/\/github.com\/anaerobeth","followers_url":"https:\/\/api.github.com\/users\/anaerobeth\/followers","following_url":"https:\/\/api.github.com\/users\/anaerobeth\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anaerobeth\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anaerobeth\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anaerobeth\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anaerobeth\/orgs","repos_url":"https:\/\/api.github.com\/users\/anaerobeth\/repos","events_url":"https:\/\/api.github.com\/users\/anaerobeth\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anaerobeth\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T04:44:01Z","updated_at":"2020-12-10T15:10:30Z","closed_at":"2020-12-10T15:10:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1354","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1354","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1354.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1354.patch","merged_at":"2020-12-10T15:10:30Z"},"body":"This PR adds the TweetQA dataset, the first dataset for QA on social media data by leveraging news media and crowdsourcing.\r\n\r\nPaper: https:\/\/arxiv.org\/abs\/1907.06292\r\nRepository: https:\/\/tweetqa.github.io\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1354\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1354\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1353","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1353\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1353\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1353\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1353","id":759980004,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0ODg2MDk4","number":1353,"title":"New instruction for how to generate 
dataset_infos.json","user":{"login":"ncoop57","id":7613470,"node_id":"MDQ6VXNlcjc2MTM0NzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7613470?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ncoop57","html_url":"https:\/\/github.com\/ncoop57","followers_url":"https:\/\/api.github.com\/users\/ncoop57\/followers","following_url":"https:\/\/api.github.com\/users\/ncoop57\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ncoop57\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ncoop57\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ncoop57\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ncoop57\/orgs","repos_url":"https:\/\/api.github.com\/users\/ncoop57\/repos","events_url":"https:\/\/api.github.com\/users\/ncoop57\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ncoop57\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T04:24:40Z","updated_at":"2020-12-10T13:45:15Z","closed_at":"2020-12-10T13:45:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1353","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1353","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1353.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1353.patch","merged_at":"2020-12-10T13:45:15Z"},"body":"Add additional instructions for how to generate dataset_infos.json for manual download datasets. Information courtesy of `Taimur Ibrahim` from the slack channel","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1353\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1353\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1352","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1352\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1352\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1352\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1352","id":759978543,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0ODg0ODg4","number":1352,"title":"change url for prachathai67k to internet 
archive","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T04:20:37Z","updated_at":"2020-12-10T13:42:17Z","closed_at":"2020-12-10T13:42:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1352","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1352","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1352.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1352.patch","merged_at":"2020-12-10T13:42:17Z"},"body":"`prachathai67k` is currently downloaded from git-lfs of PyThaiNLP github. Since the size is quite large (~250MB), I moved the URL to archive.org in order to prevent rate limit issues.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1352\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1352\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1351","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1351\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1351\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1351\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1351","id":759902770,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0ODI0NTcw","number":1351,"title":"added 
craigslist_bargians","user":{"login":"ZacharySBrown","id":7950786,"node_id":"MDQ6VXNlcjc5NTA3ODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7950786?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZacharySBrown","html_url":"https:\/\/github.com\/ZacharySBrown","followers_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/followers","following_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/repos","events_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-09T01:02:31Z","updated_at":"2020-12-10T14:14:34Z","closed_at":"2020-12-10T14:14:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1351","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1351","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1351.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1351.patch","merged_at":"2020-12-10T14:14:34Z"},"body":"`craigslist_bargains` data set from [here](https:\/\/worksheets.codalab.org\/worksheets\/0x453913e76b65495d8b9730d41c7e0a0c\/)\r\n\r\n(Cleaned up version of #1278)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1351\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1351\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1350","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1350\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1350\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1350\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1350","id":759879789,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0ODA1OTY3","number":1350,"title":"add LeNER-Br 
dataset","user":{"login":"jonatasgrosman","id":5097052,"node_id":"MDQ6VXNlcjUwOTcwNTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5097052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonatasgrosman","html_url":"https:\/\/github.com\/jonatasgrosman","followers_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/followers","following_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/repos","events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonatasgrosman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-09T00:06:38Z","updated_at":"2020-12-10T14:11:33Z","closed_at":"2020-12-10T14:11:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1350","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1350","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1350.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1350.patch","merged_at":"2020-12-10T14:11:33Z"},"body":"Adding the LeNER-Br dataset, a Portuguese language dataset for named entity recognition ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1350\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1350\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1349","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1349\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1349\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1349\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1349","id":759870664,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0Nzk4NDQ3","number":1349,"title":"initial commit for MultiReQA 
","user":{"login":"Karthik-Bhaskar","id":13200370,"node_id":"MDQ6VXNlcjEzMjAwMzcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13200370?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar","html_url":"https:\/\/github.com\/Karthik-Bhaskar","followers_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/followers","following_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/orgs","repos_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/repos","events_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Karthik-Bhaskar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-08T23:44:34Z","updated_at":"2020-12-09T16:46:37Z","closed_at":"2020-12-09T16:46:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1349","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1349","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1349.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1349.patch","merged_at":null},"body":"Added MultiReQA, which is a dataset containing the sentence boundary annotation from eight publicly available QA datasets including SearchQA, TriviaQA, HotpotQA, NaturalQuestions, SQuAD, BioASQ, RelationExtraction, and TextbookQA. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1349\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1349\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1348","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1348\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1348\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1348\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1348","id":759869849,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0Nzk3Nzcy","number":1348,"title":"add Yoruba NER dataset","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-08T23:42:35Z","updated_at":"2020-12-10T14:30:25Z","closed_at":"2020-12-10T14:09:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1348","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1348","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1348.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1348.patch","merged_at":"2020-12-10T14:09:43Z"},"body":"Added Yoruba GV dataset based on this paper","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1348\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1348\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1347","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1347\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1347\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1347\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1347","id":759845231,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0Nzc3NjQ0","number":1347,"title":"Add spanish billion words 
corpus","user":{"login":"mariagrandury","id":57645283,"node_id":"MDQ6VXNlcjU3NjQ1Mjgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57645283?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariagrandury","html_url":"https:\/\/github.com\/mariagrandury","followers_url":"https:\/\/api.github.com\/users\/mariagrandury\/followers","following_url":"https:\/\/api.github.com\/users\/mariagrandury\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariagrandury\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariagrandury\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariagrandury\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariagrandury\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariagrandury\/repos","events_url":"https:\/\/api.github.com\/users\/mariagrandury\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariagrandury\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-08T22:51:38Z","updated_at":"2020-12-11T11:26:39Z","closed_at":"2020-12-11T11:15:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1347","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1347","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1347.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1347.patch","merged_at":null},"body":"Add an unannotated Spanish corpus of nearly 1.5 billion words, compiled from different resources from the web.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1347\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1347\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1346","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1346\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1346\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1346\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1346","id":759844137,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0Nzc2ODE5","number":1346,"title":"Add MultiBooked 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T22:49:36Z","updated_at":"2020-12-15T17:02:09Z","closed_at":"2020-12-15T17:02:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1346","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1346","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1346.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1346.patch","merged_at":"2020-12-15T17:02:08Z"},"body":"Add dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1346\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1346\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1345","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1345\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1345\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1345\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1345","id":759835486,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NzY5NzMw","number":1345,"title":"First commit of NarrativeQA 
Dataset","user":{"login":"rsanjaykamath","id":18527321,"node_id":"MDQ6VXNlcjE4NTI3MzIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18527321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rsanjaykamath","html_url":"https:\/\/github.com\/rsanjaykamath","followers_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/followers","following_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/orgs","repos_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/repos","events_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rsanjaykamath\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T22:31:59Z","updated_at":"2021-01-25T15:31:52Z","closed_at":"2020-12-09T09:29:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1345","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1345","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1345.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1345.patch","merged_at":null},"body":"Added NarrativeQA dataset and included a manual downloading option to download scripts from the original scripts provided by the authors. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1345\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1345\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1344","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1344\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1344\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1344\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1344","id":759831925,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NzY2ODIy","number":1344,"title":"Add hausa ner corpus","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T22:25:04Z","updated_at":"2020-12-08T23:11:55Z","closed_at":"2020-12-08T23:11:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1344","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1344","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1344.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1344.patch","merged_at":null},"body":"Added Hausa VOA NER data ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1344\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1344\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1343","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1343\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1343\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1343\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1343","id":759809999,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NzQ4NTE4","number":1343,"title":"Add 
LiveQA","user":{"login":"j-chim","id":22435209,"node_id":"MDQ6VXNlcjIyNDM1MjA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22435209?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/j-chim","html_url":"https:\/\/github.com\/j-chim","followers_url":"https:\/\/api.github.com\/users\/j-chim\/followers","following_url":"https:\/\/api.github.com\/users\/j-chim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/j-chim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/j-chim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/j-chim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/j-chim\/orgs","repos_url":"https:\/\/api.github.com\/users\/j-chim\/repos","events_url":"https:\/\/api.github.com\/users\/j-chim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/j-chim\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T21:52:36Z","updated_at":"2020-12-14T09:40:28Z","closed_at":"2020-12-14T09:40:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1343","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1343","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1343.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1343.patch","merged_at":"2020-12-14T09:40:28Z"},"body":"This PR adds LiveQA, the Chinese real-time\/timeline-based QA task by [Liu et al., 2020](https:\/\/arxiv.org\/pdf\/2010.00526.pdf). ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1343\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1343\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1342","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1342\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1342\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1342\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1342","id":759794121,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NzM1MzAw","number":1342,"title":"[yaml] Fix metadata according to pre-specified 
scheme","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T21:26:34Z","updated_at":"2020-12-09T15:37:27Z","closed_at":"2020-12-09T15:37:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1342","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1342","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1342.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1342.patch","merged_at":"2020-12-09T15:37:26Z"},"body":"@lhoestq @yjernite ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1342\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1342\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1341","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1341\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1341\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1341\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1341","id":759784557,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NzI3MzU5","number":1341,"title":"added references to only data card creator to all 
guides","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T21:11:11Z","updated_at":"2020-12-08T21:36:12Z","closed_at":"2020-12-08T21:36:11Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1341","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1341","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1341.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1341.patch","merged_at":"2020-12-08T21:36:11Z"},"body":"We can now use the wonderful online form for dataset cards created by @evrardts ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1341\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1341\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1340","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1340\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1340\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1340\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1340","id":759765408,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NzExMjc5","number":1340,"title":":fist: \u00a1Viva la 
Independencia!","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T20:43:43Z","updated_at":"2020-12-14T10:36:01Z","closed_at":"2020-12-14T10:36:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1340","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1340","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1340.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1340.patch","merged_at":"2020-12-14T10:36:01Z"},"body":"Adds the Catalonia Independence Corpus for stance-detection of Tweets.\r\n\r\nReady for review!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1340\/reactions","total_count":8,"+1":0,"-1":0,"laugh":4,"hooray":3,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1340\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1339","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1339\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1339\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1339\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1339","id":759744088,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0Njk0NDI4","number":1339,"title":"hate_speech_18 initial 
commit","user":{"login":"czabo","id":75574105,"node_id":"MDQ6VXNlcjc1NTc0MTA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75574105?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/czabo","html_url":"https:\/\/github.com\/czabo","followers_url":"https:\/\/api.github.com\/users\/czabo\/followers","following_url":"https:\/\/api.github.com\/users\/czabo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/czabo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/czabo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/czabo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/czabo\/orgs","repos_url":"https:\/\/api.github.com\/users\/czabo\/repos","events_url":"https:\/\/api.github.com\/users\/czabo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/czabo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-08T20:10:08Z","updated_at":"2020-12-12T16:17:32Z","closed_at":"2020-12-12T16:17:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1339","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1339","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1339.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1339.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1339\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1339\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1338","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1338\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1338\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1338\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1338","id":759725770,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0Njc5ODcz","number":1338,"title":"Add GigaFren 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T19:42:04Z","updated_at":"2020-12-14T10:03:47Z","closed_at":"2020-12-14T10:03:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1338","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1338","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1338.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1338.patch","merged_at":"2020-12-14T10:03:46Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1338\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1338\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1337","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1337\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1337\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1337\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1337","id":759710482,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NjY3NDUz","number":1337,"title":"Add spanish billion 
words","user":{"login":"mariagrandury","id":57645283,"node_id":"MDQ6VXNlcjU3NjQ1Mjgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57645283?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariagrandury","html_url":"https:\/\/github.com\/mariagrandury","followers_url":"https:\/\/api.github.com\/users\/mariagrandury\/followers","following_url":"https:\/\/api.github.com\/users\/mariagrandury\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariagrandury\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariagrandury\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariagrandury\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariagrandury\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariagrandury\/repos","events_url":"https:\/\/api.github.com\/users\/mariagrandury\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariagrandury\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T19:18:02Z","updated_at":"2020-12-08T22:59:38Z","closed_at":"2020-12-08T21:15:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1337","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1337","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1337.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1337.patch","merged_at":null},"body":"Add an unannotated corpus of the Spanish language of nearly 1.5 billion words, compiled from different resources from the web.\r\n\r\nThe dataset needs 10 GB (download: 1.89 GiB, generated: 8.34 GiB, post-processed: Unknown size, total: 10.22 GiB), the test using dummy data pass but my laptop isn't able to run it on the real data (I left it running for over 8 hours and it didn't finish).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1337\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1337\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1336","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1336\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1336\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1336\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1336","id":759706932,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NjY0NjIw","number":1336,"title":"Add dataset Yoruba BBC Topic 
Classification","user":{"login":"michael-aloys","id":1858628,"node_id":"MDQ6VXNlcjE4NTg2Mjg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1858628?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/michael-aloys","html_url":"https:\/\/github.com\/michael-aloys","followers_url":"https:\/\/api.github.com\/users\/michael-aloys\/followers","following_url":"https:\/\/api.github.com\/users\/michael-aloys\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/michael-aloys\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/michael-aloys\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/michael-aloys\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/michael-aloys\/orgs","repos_url":"https:\/\/api.github.com\/users\/michael-aloys\/repos","events_url":"https:\/\/api.github.com\/users\/michael-aloys\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/michael-aloys\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T19:12:18Z","updated_at":"2020-12-10T11:27:41Z","closed_at":"2020-12-10T11:27:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1336","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1336","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1336.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1336.patch","merged_at":"2020-12-10T11:27:41Z"},"body":"Added new dataset Yoruba BBC Topic Classification\r\n\r\nContains loading script as well as dataset card including YAML tags.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1336\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1336\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1335","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1335\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1335\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1335\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1335","id":759705835,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NjYzNzQ2","number":1335,"title":"Added Bianet 
dataset","user":{"login":"param087","id":26374564,"node_id":"MDQ6VXNlcjI2Mzc0NTY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26374564?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/param087","html_url":"https:\/\/github.com\/param087","followers_url":"https:\/\/api.github.com\/users\/param087\/followers","following_url":"https:\/\/api.github.com\/users\/param087\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/param087\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/param087\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/param087\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/param087\/orgs","repos_url":"https:\/\/api.github.com\/users\/param087\/repos","events_url":"https:\/\/api.github.com\/users\/param087\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/param087\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T19:10:32Z","updated_at":"2020-12-14T10:00:56Z","closed_at":"2020-12-14T10:00:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1335","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1335","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1335.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1335.patch","merged_at":"2020-12-14T10:00:55Z"},"body":"Hi :hugs:, This is a PR for [Bianet: A parallel news corpus in Turkish, Kurdish and English; Source](http:\/\/opus.nlpl.eu\/Bianet.php) dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1335\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1335\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1334","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1334\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1334\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1334\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1334","id":759699993,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NjU5MDg2","number":1334,"title":"Add QED Amara 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T19:01:13Z","updated_at":"2020-12-10T11:17:25Z","closed_at":"2020-12-10T11:15:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1334","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1334","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1334.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1334.patch","merged_at":"2020-12-10T11:15:57Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1334\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1334\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1333","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1333\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1333\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1333\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1333","id":759687836,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NjQ4OTI4","number":1333,"title":"Add Tanzil 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T18:45:15Z","updated_at":"2020-12-10T11:17:56Z","closed_at":"2020-12-10T11:14:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1333","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1333","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1333.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1333.patch","merged_at":"2020-12-10T11:14:43Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1333\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1333\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1332","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1332\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1332\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1332\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1332","id":759679135,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NjQxOTE5","number":1332,"title":"Add Open Subtitles 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T18:31:45Z","updated_at":"2020-12-10T11:17:38Z","closed_at":"2020-12-10T11:13:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1332","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1332","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1332.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1332.patch","merged_at":"2020-12-10T11:13:18Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1332\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1332\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1331","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1331\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1331\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1331\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1331","id":759677189,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NjQwMzc5","number":1331,"title":"First version of the new dataset 
hausa_voa_topics","user":{"login":"michael-aloys","id":1858628,"node_id":"MDQ6VXNlcjE4NTg2Mjg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1858628?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/michael-aloys","html_url":"https:\/\/github.com\/michael-aloys","followers_url":"https:\/\/api.github.com\/users\/michael-aloys\/followers","following_url":"https:\/\/api.github.com\/users\/michael-aloys\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/michael-aloys\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/michael-aloys\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/michael-aloys\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/michael-aloys\/orgs","repos_url":"https:\/\/api.github.com\/users\/michael-aloys\/repos","events_url":"https:\/\/api.github.com\/users\/michael-aloys\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/michael-aloys\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T18:28:52Z","updated_at":"2020-12-10T11:09:53Z","closed_at":"2020-12-10T11:09:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1331","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1331","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1331.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1331.patch","merged_at":"2020-12-10T11:09:53Z"},"body":"Contains loading script as well as dataset card including YAML tags.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1331\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1331\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1330","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1330\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1330\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1330\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1330","id":759657324,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NjI0MzMx","number":1330,"title":"added un_ga 
dataset","user":{"login":"param087","id":26374564,"node_id":"MDQ6VXNlcjI2Mzc0NTY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26374564?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/param087","html_url":"https:\/\/github.com\/param087","followers_url":"https:\/\/api.github.com\/users\/param087\/followers","following_url":"https:\/\/api.github.com\/users\/param087\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/param087\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/param087\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/param087\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/param087\/orgs","repos_url":"https:\/\/api.github.com\/users\/param087\/repos","events_url":"https:\/\/api.github.com\/users\/param087\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/param087\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-08T17:58:38Z","updated_at":"2020-12-14T17:52:34Z","closed_at":"2020-12-14T17:52:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1330","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1330","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1330.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1330.patch","merged_at":null},"body":"Hi :hugs:, This is a PR for [United nations general assembly resolutions: A six-language parallel corpus](http:\/\/opus.nlpl.eu\/UN.php) dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1330\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1330\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1329","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1329\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1329\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1329\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1329","id":759654174,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NjIxNzg0","number":1329,"title":"Add yoruba ner 
corpus","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T17:54:00Z","updated_at":"2020-12-08T23:11:12Z","closed_at":"2020-12-08T23:11:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1329","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1329","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1329.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1329.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1329\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1329\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1328","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1328\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1328\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1328\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1328","id":759634907,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NjA2MDM1","number":1328,"title":"Added the NewsPH Raw dataset and corresponding dataset 
card","user":{"login":"jcblaisecruz02","id":24757547,"node_id":"MDQ6VXNlcjI0NzU3NTQ3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24757547?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jcblaisecruz02","html_url":"https:\/\/github.com\/jcblaisecruz02","followers_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/followers","following_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/orgs","repos_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/repos","events_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T17:25:45Z","updated_at":"2020-12-10T11:04:34Z","closed_at":"2020-12-10T11:04:34Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1328","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1328","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1328.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1328.patch","merged_at":"2020-12-10T11:04:34Z"},"body":"This PR adds the original NewsPH dataset which is used to autogenerate the NewsPH-NLI dataset. 
Reopened a new PR as the previous one had problems.\r\n\r\nPaper: https:\/\/arxiv.org\/abs\/2010.11574\r\nRepo: https:\/\/github.com\/jcblaisecruz02\/Filipino-Text-Benchmarks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1328\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1328\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1327","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1327\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1327\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1327\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1327","id":759629321,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NjAxNDM3","number":1327,"title":"Add msr_genomics_kbcomp dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T17:18:20Z","updated_at":"2020-12-08T18:18:32Z","closed_at":"2020-12-08T18:18:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1327","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1327","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1327.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1327.patch","merged_at":"2020-12-08T18:18:06Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1327\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1327\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1326","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1326\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1326\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1326\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1326","id":759611784,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTg2ODY4","number":1326,"title":"TEP: Tehran English-Persian parallel corpus","user":{"login":"spatil6","id":6419011,"node_id":"MDQ6VXNlcjY0MTkwMTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6419011?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/spatil6","html_url":"https:\/\/github.com\/spatil6","followers_url":"https:\/\/api.github.com\/users\/spatil6\/followers","following_url":"https:\/\/api.github.com\/users\/spatil6\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/spatil6\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/spatil6\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/spatil6\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/spatil6\/orgs","repos_url":"https:\/\/api.github.com\/users\/spatil6\/repos","events_url":"https:\/\/api.github.com\/users\/spatil6\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/spatil6\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T16:56:53Z","updated_at":"2020-12-19T14:55:03Z","closed_at":"2020-12-10T11:25:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1326","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1326","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1326.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1326.patch","merged_at":"2020-12-10T11:25:17Z"},"body":"TEP: Tehran English-Persian parallel corpus\r\nmore info : http:\/\/opus.nlpl.eu\/TEP.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1326\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1326\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1325","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1325\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1325\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1325\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1325","id":759595556,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTczNjM2","number":1325,"title":"Add humicroedit 
dataset","user":{"login":"saradhix","id":1351362,"node_id":"MDQ6VXNlcjEzNTEzNjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1351362?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/saradhix","html_url":"https:\/\/github.com\/saradhix","followers_url":"https:\/\/api.github.com\/users\/saradhix\/followers","following_url":"https:\/\/api.github.com\/users\/saradhix\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/saradhix\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/saradhix\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/saradhix\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/saradhix\/orgs","repos_url":"https:\/\/api.github.com\/users\/saradhix\/repos","events_url":"https:\/\/api.github.com\/users\/saradhix\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/saradhix\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-08T16:35:46Z","updated_at":"2020-12-17T17:59:09Z","closed_at":"2020-12-17T17:59:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1325","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1325","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1325.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1325.patch","merged_at":"2020-12-17T17:59:09Z"},"body":"Pull request for adding humicroedit dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1325\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1325\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1324","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1324\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1324\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1324\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1324","id":759587864,"node_id":"MDU6SXNzdWU3NTk1ODc4NjQ=","number":1324,"title":"\u2753 Sharing ElasticSearch indexed dataset 
","user":{"login":"pietrolesci","id":61748653,"node_id":"MDQ6VXNlcjYxNzQ4NjUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/61748653?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pietrolesci","html_url":"https:\/\/github.com\/pietrolesci","followers_url":"https:\/\/api.github.com\/users\/pietrolesci\/followers","following_url":"https:\/\/api.github.com\/users\/pietrolesci\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pietrolesci\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pietrolesci\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pietrolesci\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pietrolesci\/orgs","repos_url":"https:\/\/api.github.com\/users\/pietrolesci\/repos","events_url":"https:\/\/api.github.com\/users\/pietrolesci\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pietrolesci\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-08T16:25:58Z","updated_at":"2020-12-22T07:50:56Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi there,\r\n\r\nFirst of all, thank you very much for this amazing library. Datasets have become my preferred data structure for basically everything I am currently doing.\r\n\r\n**Question:** I'm working with a dataset and I have an elasticsearch container running at localhost:9200. 
I added an elasticsearch index and I was wondering\r\n\r\n- how can I know where it has been saved?\r\n\r\n- how can I share the indexed dataset with others?\r\n\r\nI tried to dig into the docs, but could not find anything about that.\r\n\r\nThank you very much for your help.\r\n\r\nBest,\r\nPietro\r\n\r\nEdit: apologies for the wrong label","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1324\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1324\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1323","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1323\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1323\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1323\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1323","id":759581919,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTYyNDQ0","number":1323,"title":"Add CC-News dataset of English language articles","user":{"login":"vblagoje","id":458335,"node_id":"MDQ6VXNlcjQ1ODMzNQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/458335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vblagoje","html_url":"https:\/\/github.com\/vblagoje","followers_url":"https:\/\/api.github.com\/users\/vblagoje\/followers","following_url":"https:\/\/api.github.com\/users\/vblagoje\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vblagoje\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vblagoje\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vblagoje\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vblagoje\/orgs","repos_url":"https:\/\/api.github.com\/users\/vblagoje\/repos","events_url":"https:\/\/api.github.com\/users\/vblagoje\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vblagoje\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-08T16:18:15Z","updated_at":"2021-02-01T16:55:49Z","closed_at":"2021-02-01T16:55:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1323","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1323","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1323.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1323.patch","merged_at":"2021-02-01T16:55:49Z"},"body":"Adds [CC-News](https:\/\/commoncrawl.org\/2016\/10\/news-dataset-available\/) dataset. It contains 708241 English language news articles. Although each article has a language field these tags are not reliable. I've used Spacy language detection [pipeline](https:\/\/spacy.io\/universe\/project\/spacy-langdetect) to confirm that the article language is indeed English. \r\n\r\nThe prepared dataset is temporarily hosted on my private Google Storage [bucket](https:\/\/storage.googleapis.com\/hf_datasets\/cc_news.tar.gz). We can move it to HF storage and update this PR before merging. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1323\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1323\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1322","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1322\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1322\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1322\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1322","id":759576003,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTU3Njg3","number":1322,"title":"add indonlu benchmark datasets","user":{"login":"yasirabd","id":6518504,"node_id":"MDQ6VXNlcjY1MTg1MDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6518504?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yasirabd","html_url":"https:\/\/github.com\/yasirabd","followers_url":"https:\/\/api.github.com\/users\/yasirabd\/followers","following_url":"https:\/\/api.github.com\/users\/yasirabd\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yasirabd\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yasirabd\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yasirabd\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yasirabd\/orgs","repos_url":"https:\/\/api.github.com\/users\/yasirabd\/repos","events_url":"https:\/\/api.github.com\/users\/yasirabd\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yasirabd\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T16:10:58Z","updated_at":"2020-12-13T02:11:27Z","closed_at":"2020-12-13T01:54:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1322","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1322","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1322.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1322.patch","merged_at":null},"body":"The IndoNLU benchmark is a collection of resources for training, evaluating, and analyzing natural language understanding systems for the Indonesian language. 
There are 12 datasets in IndoNLU.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1322\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1322\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1321","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1321\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1321\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1321\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1321","id":759573610,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTU1Nzg1","number":1321,"title":"added dutch_social","user":{"login":"skyprince999","id":9033954,"node_id":"MDQ6VXNlcjkwMzM5NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9033954?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/skyprince999","html_url":"https:\/\/github.com\/skyprince999","followers_url":"https:\/\/api.github.com\/users\/skyprince999\/followers","following_url":"https:\/\/api.github.com\/users\/skyprince999\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/skyprince999\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/skyprince999\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/skyprince999\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/skyprince999\/orgs","repos_url":"https:\/\/api.github.com\/users\/skyprince999\/repos","events_url":"https:\/\/api.github.com\/users\/skyprince999\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/skyprince999\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-08T16:07:54Z","updated_at":"2020-12-16T10:14:17Z","closed_at":"2020-12-16T10:14:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1321","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1321","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1321.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1321.patch","merged_at":"2020-12-16T10:14:17Z"},"body":"The Dutch social media tweets dataset. Which has a total of more than 210k tweets in dutch language. 
These tweets have been machine annotated with sentiment scores (`label` feature) and `industry` and `hisco_codes`\r\n\r\nIt can be used for sentiment analysis, multi-label classification and entity tagging","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1321\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1321\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1320","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1320\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1320\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1320\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1320","id":759566148,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTUwMDM4","number":1320,"title":"Added the WikiText-TL39 dataset and corresponding card","user":{"login":"jcblaisecruz02","id":24757547,"node_id":"MDQ6VXNlcjI0NzU3NTQ3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24757547?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jcblaisecruz02","html_url":"https:\/\/github.com\/jcblaisecruz02","followers_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/followers","following_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/orgs","repos_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/repos","events_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T16:00:26Z","updated_at":"2020-12-10T11:24:53Z","closed_at":"2020-12-10T11:24:53Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1320","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1320","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1320.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1320.patch","merged_at":"2020-12-10T11:24:52Z"},"body":"This PR adds the WikiText-TL-39 Filipino Language Modeling dataset. 
Restarted a new pull request since there were problems with the earlier one.\r\n\r\nPaper: https:\/\/arxiv.org\/abs\/1907.00409\r\nRepo: https:\/\/github.com\/jcblaisecruz02\/Filipino-Text-Benchmarks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1320\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1320\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1319","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1319\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1319\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1319\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1319","id":759565923,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTQ5ODU5","number":1319,"title":"adding wili-2018 language identification dataset","user":{"login":"Shubhambindal2017","id":31540058,"node_id":"MDQ6VXNlcjMxNTQwMDU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31540058?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Shubhambindal2017","html_url":"https:\/\/github.com\/Shubhambindal2017","followers_url":"https:\/\/api.github.com\/users\/Shubhambindal2017\/followers","following_url":"https:\/\/api.github.com\/users\/Shubhambindal2017\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Shubhambindal2017\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Shubhambindal2017\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Shubhambindal2017\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Shubhambindal2017\/orgs","repos_url":"https:\/\/api.github.com\/users\/Shubhambindal2017\/repos","events_url":"https:\/\/api.github.com\/users\/Shubhambindal2017\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Shubhambindal2017\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-08T16:00:09Z","updated_at":"2020-12-14T21:20:32Z","closed_at":"2020-12-14T21:20:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1319","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1319","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1319.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1319.patch","merged_at":"2020-12-14T21:20:32Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1319\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1319\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1318","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1318\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1318\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1318\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1318","id":759565629,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTQ5NjE3","number":1318,"title":"ethos first commit","user":{"login":"iamollas","id":22838900,"node_id":"MDQ6VXNlcjIyODM4OTAw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22838900?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iamollas","html_url":"https:\/\/github.com\/iamollas","followers_url":"https:\/\/api.github.com\/users\/iamollas\/followers","following_url":"https:\/\/api.github.com\/users\/iamollas\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iamollas\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iamollas\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iamollas\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iamollas\/orgs","repos_url":"https:\/\/api.github.com\/users\/iamollas\/repos","events_url":"https:\/\/api.github.com\/users\/iamollas\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iamollas\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-08T15:59:47Z","updated_at":"2020-12-10T14:45:57Z","closed_at":"2020-12-10T14:45:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1318","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1318","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1318.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1318.patch","merged_at":null},"body":"Ethos passed all the tests except from this one: \r\nRUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_dataset_all_configs_\r\n\r\nwith this error: \r\nE OSError: Cannot find data file. 
\r\nE Original error:\r\nE [Errno 2] No such file or directory: ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1318\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1318\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1317","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1317\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1317\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1317\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1317","id":759553495,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTM5NTQ5","number":1317,"title":"add 10k German News Article Dataset","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-08T15:44:25Z","updated_at":"2021-09-17T16:55:51Z","closed_at":"2020-12-16T16:50:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1317","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1317","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1317.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1317.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1317\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1317\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1316","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1316\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1316\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1316\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1316","id":759549601,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTM2Mzc1","number":1316,"title":"Allow GitHub 
releases as dataset source","user":{"login":"benjaminvdb","id":8875786,"node_id":"MDQ6VXNlcjg4NzU3ODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8875786?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/benjaminvdb","html_url":"https:\/\/github.com\/benjaminvdb","followers_url":"https:\/\/api.github.com\/users\/benjaminvdb\/followers","following_url":"https:\/\/api.github.com\/users\/benjaminvdb\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/benjaminvdb\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/benjaminvdb\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/benjaminvdb\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/benjaminvdb\/orgs","repos_url":"https:\/\/api.github.com\/users\/benjaminvdb\/repos","events_url":"https:\/\/api.github.com\/users\/benjaminvdb\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/benjaminvdb\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T15:39:35Z","updated_at":"2020-12-10T10:12:00Z","closed_at":"2020-12-10T10:12:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1316","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1316","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1316.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1316.patch","merged_at":"2020-12-10T10:12:00Z"},"body":"# Summary\r\n\r\nProviding a GitHub release URL to `DownloadManager.download()` currently throws a `ConnectionError: Couldn't reach [DOWNLOAD_URL]`. This PR fixes this problem by adding an exception for GitHub releases in `datasets.utils.file_utils.get_from_cache()`.\r\n\r\n# Reproduce\r\n\r\n```\r\nimport datasets\r\nurl = 'http:\/\/github.com\/benjaminvdb\/DBRD\/releases\/download\/v3.0\/DBRD_v3.tgz'\r\nresult = datasets.utils.file_utils.get_from_cache(url)\r\n\r\n# Returns: ConnectionError: Couldn't reach http:\/\/github.com\/benjaminvdb\/DBRD\/releases\/download\/v3.0\/DBRD_v3.tgz\r\n```\r\n\r\n# Cause\r\n\r\nGitHub releases returns a HTTP status 403 (FOUND), indicating that the request is being redirected (to AWS S3, in this case). `get_from_cache()` checks whether the status is 200 (OK) or if it is part of two exceptions (Google Drive or Firebase), otherwise the mentioned error is thrown.\r\n\r\n# Solution\r\n\r\nJust like the exceptions for Google Drive and Firebase, add a condition for GitHub releases URLs that return the HTTP status 403. 
If this is the case, continue normally.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1316\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1316\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1315","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1315\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1315\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1315\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1315","id":759548706,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTM1NjM4","number":1315,"title":"add yelp_review_full","user":{"login":"hfawaz","id":29229602,"node_id":"MDQ6VXNlcjI5MjI5NjAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29229602?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hfawaz","html_url":"https:\/\/github.com\/hfawaz","followers_url":"https:\/\/api.github.com\/users\/hfawaz\/followers","following_url":"https:\/\/api.github.com\/users\/hfawaz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hfawaz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hfawaz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hfawaz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hfawaz\/orgs","repos_url":"https:\/\/api.github.com\/users\/hfawaz\/repos","events_url":"https:\/\/api.github.com\/users\/hfawaz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hfawaz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T15:38:27Z","updated_at":"2020-12-09T15:55:49Z","closed_at":"2020-12-09T15:55:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1315","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1315","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1315.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1315.patch","merged_at":"2020-12-09T15:55:48Z"},"body":"This corresponds to the Yelp-5 requested in https:\/\/github.com\/huggingface\/datasets\/issues\/353\r\nI included the dataset card. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1315\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1315\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1314","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1314\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1314\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1314\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1314","id":759541937,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTMwMDE5","number":1314,"title":"Add snips built in intents 2016 12","user":{"login":"bduvenhage","id":8405335,"node_id":"MDQ6VXNlcjg0MDUzMzU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8405335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bduvenhage","html_url":"https:\/\/github.com\/bduvenhage","followers_url":"https:\/\/api.github.com\/users\/bduvenhage\/followers","following_url":"https:\/\/api.github.com\/users\/bduvenhage\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bduvenhage\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bduvenhage\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bduvenhage\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bduvenhage\/orgs","repos_url":"https:\/\/api.github.com\/users\/bduvenhage\/repos","events_url":"https:\/\/api.github.com\/users\/bduvenhage\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bduvenhage\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-08T15:30:19Z","updated_at":"2020-12-14T09:59:07Z","closed_at":"2020-12-14T09:59:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1314","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1314","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1314.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1314.patch","merged_at":"2020-12-14T09:59:06Z"},"body":"This PR proposes to add the Snips.ai built in intents dataset. 
The first configuration added is for the intent labels only, but the dataset includes entity slots that may in future be added as alternate configurations.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1314\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1314\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1313","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1313\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1313\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1313\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1313","id":759536512,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTI1NjE3","number":1313,"title":"Add HateSpeech Corpus for Polish","user":{"login":"kacperlukawski","id":2649301,"node_id":"MDQ6VXNlcjI2NDkzMDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2649301?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kacperlukawski","html_url":"https:\/\/github.com\/kacperlukawski","followers_url":"https:\/\/api.github.com\/users\/kacperlukawski\/followers","following_url":"https:\/\/api.github.com\/users\/kacperlukawski\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kacperlukawski\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kacperlukawski\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kacperlukawski\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kacperlukawski\/orgs","repos_url":"https:\/\/api.github.com\/users\/kacperlukawski\/repos","events_url":"https:\/\/api.github.com\/users\/kacperlukawski\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kacperlukawski\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-08T15:23:53Z","updated_at":"2020-12-16T16:48:45Z","closed_at":"2020-12-16T16:48:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1313","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1313","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1313.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1313.patch","merged_at":"2020-12-16T16:48:45Z"},"body":"This PR adds a HateSpeech Corpus for Polish, containing offensive language examples.\r\n\r\n- **Homepage:** http:\/\/zil.ipipan.waw.pl\/HateSpeech\r\n- **Paper:** http:\/\/www.qualitativesociologyreview.org\/PL\/Volume38\/PSJ_13_2_Troszynski_Wawer.pdf","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1313\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1313\/timeline","performed_via_github_app":null} 
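The #1316 body above describes its fix only in prose. Below is a minimal sketch of the status-code exception it describes, assuming a `requests` response object; the helper name and the URL test are illustrative placeholders, not the merged `get_from_cache` change.

```python
import requests

def _download_status_ok(url: str, response: requests.Response) -> bool:
    """Sketch of the acceptance check described in #1316 (not the actual diff)."""
    # 200 (OK) is always accepted.
    if response.status_code == 200:
        return True
    # Per the #1316 description, GitHub release asset URLs can answer with a
    # non-200 status while the file is still downloadable after the redirect,
    # so they are special-cased here, analogous to the existing Google Drive /
    # Firebase exceptions mentioned in the PR body.
    if response.status_code == 403 and "github.com" in url and "/releases/download/" in url:
        return True
    return False
```

Under this sketch, a caller such as `get_from_cache()` would raise `ConnectionError: Couldn't reach <url>` only when the check returns `False`.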
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1312","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1312\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1312\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1312\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1312","id":759532626,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTIyMzc1","number":1312,"title":"Jigsaw toxicity pred","user":{"login":"taihim","id":13764071,"node_id":"MDQ6VXNlcjEzNzY0MDcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13764071?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/taihim","html_url":"https:\/\/github.com\/taihim","followers_url":"https:\/\/api.github.com\/users\/taihim\/followers","following_url":"https:\/\/api.github.com\/users\/taihim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/taihim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/taihim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/taihim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/taihim\/orgs","repos_url":"https:\/\/api.github.com\/users\/taihim\/repos","events_url":"https:\/\/api.github.com\/users\/taihim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/taihim\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T15:19:14Z","updated_at":"2020-12-11T12:11:32Z","closed_at":"2020-12-11T12:11:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1312","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1312","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1312.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1312.patch","merged_at":null},"body":"Requires manually downloading data from Kaggle.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1312\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1312\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1311","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1311\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1311\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1311\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1311","id":759514819,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTA3NjM1","number":1311,"title":"Add OPUS Bible Corpus (102 
Languages)","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T14:57:08Z","updated_at":"2020-12-09T15:30:57Z","closed_at":"2020-12-09T15:30:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1311","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1311","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1311.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1311.patch","merged_at":"2020-12-09T15:30:56Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1311\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1311\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1310","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1310\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1310\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1310\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1310","id":759508921,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NTAyNzE5","number":1310,"title":"Add OffensEval-TR 2020 
Dataset","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-08T14:49:51Z","updated_at":"2020-12-12T14:15:42Z","closed_at":"2020-12-09T16:02:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1310","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1310","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1310.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1310.patch","merged_at":"2020-12-09T16:02:06Z"},"body":"This PR adds the OffensEval-TR 2020 dataset which is a Turkish offensive language corpus by me and @basakbuluz. 
The corpus consist of randomly sampled tweets and annotated in a similar way to [OffensEval](https:\/\/sites.google.com\/site\/offensevalsharedtask\/) and [GermEval](https:\/\/projects.fzai.h-da.de\/iggsa\/).\r\n\r\n- **Homepage:** [offensive-turkish](https:\/\/coltekin.github.io\/offensive-turkish\/)\r\n- **Paper:** [A Corpus of Turkish Offensive Language on Social Media](https:\/\/coltekin.github.io\/offensive-turkish\/troff.pdf)\r\n- **Point of Contact:** [\u00c7a\u011fr\u0131 \u00c7\u00f6ltekin](ccoltekin@sfs.uni-tuebingen.de)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1310\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1310\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1309","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1309\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1309\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1309\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1309","id":759501370,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDk2NTYx","number":1309,"title":"Add SAMSum Corpus dataset","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-08T14:40:56Z","updated_at":"2020-12-14T12:32:33Z","closed_at":"2020-12-14T10:20:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1309","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1309","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1309.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1309.patch","merged_at":"2020-12-14T10:20:55Z"},"body":"Did not spent much time writing README, might update later.\r\n\r\nCopied description and some stuff from 
tensorflow_datasets\r\nhttps:\/\/github.com\/tensorflow\/datasets\/blob\/master\/tensorflow_datasets\/summarization\/samsum.py","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1309\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1309\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1308","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1308\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1308\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1308\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1308","id":759492953,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDg5Nzcw","number":1308,"title":"Add Wiki Lingua Dataset","user":{"login":"katnoria","id":7674948,"node_id":"MDQ6VXNlcjc2NzQ5NDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7674948?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/katnoria","html_url":"https:\/\/github.com\/katnoria","followers_url":"https:\/\/api.github.com\/users\/katnoria\/followers","following_url":"https:\/\/api.github.com\/users\/katnoria\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/katnoria\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/katnoria\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/katnoria\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/katnoria\/orgs","repos_url":"https:\/\/api.github.com\/users\/katnoria\/repos","events_url":"https:\/\/api.github.com\/users\/katnoria\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/katnoria\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-12-08T14:30:13Z","updated_at":"2020-12-14T10:39:52Z","closed_at":"2020-12-14T10:39:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1308","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1308","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1308.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1308.patch","merged_at":null},"body":"Hello,\r\n\r\nThis is my first PR. \r\n\r\nI have added Wiki Lingua Dataset along with dataset card to the best of my knowledge.\r\nThere was one hiccup though. 
I was unable to create dummy data because the data is in pkl format.\r\nFrom the document, I see that:\r\n```At the moment it supports data files in the following format: txt, csv, tsv, jsonl, json, xml```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1308\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1308\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1307","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1307\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1307\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1307\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1307","id":759458835,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDYxODc5","number":1307,"title":"adding capes","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T13:46:13Z","updated_at":"2020-12-09T15:40:09Z","closed_at":"2020-12-09T15:27:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1307","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1307","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1307.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1307.patch","merged_at":"2020-12-09T15:27:45Z"},"body":"Adding Parallel corpus of theses and dissertation abstracts in Portuguese and English from CAPES\r\nhttps:\/\/sites.google.com\/view\/felipe-soares\/datasets#h.p_kxOR6EhHm2a6","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1307\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1307\/timeline","performed_via_github_app":null} 
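As a rough illustration of the dummy-data limitation mentioned in the #1308 body above: the automatic dummy-data tooling covers txt/csv/tsv/jsonl/json/xml, whereas a script whose `_generate_examples` reads a pickle has to have its dummy files prepared by hand. The file layout below is hypothetical, not the actual WikiLingua format.

```python
import pickle

def _generate_examples(filepath):
    """Sketch: yield (key, example) pairs from a pickled mapping (hypothetical layout)."""
    with open(filepath, "rb") as f:
        data = pickle.load(f)  # e.g. {url: {"document": ..., "summary": ...}}
    for idx, (url, record) in enumerate(data.items()):
        yield idx, {"url": url, "document": record["document"], "summary": record["summary"]}
```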
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1306","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1306\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1306\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1306\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1306","id":759448427,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDUzMTU1","number":1306,"title":"add W&I + LOCNESS dataset (BEA-2019 workshop shared task on GEC)","user":{"login":"aseifert","id":4944799,"node_id":"MDQ6VXNlcjQ5NDQ3OTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4944799?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aseifert","html_url":"https:\/\/github.com\/aseifert","followers_url":"https:\/\/api.github.com\/users\/aseifert\/followers","following_url":"https:\/\/api.github.com\/users\/aseifert\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aseifert\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aseifert\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aseifert\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aseifert\/orgs","repos_url":"https:\/\/api.github.com\/users\/aseifert\/repos","events_url":"https:\/\/api.github.com\/users\/aseifert\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aseifert\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T13:31:34Z","updated_at":"2020-12-10T09:53:54Z","closed_at":"2020-12-10T09:53:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1306","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1306","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1306.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1306.patch","merged_at":null},"body":"- **Name:** W&I + LOCNESS dataset (from the BEA-2019 workshop shared task on GEC)\r\n- **Description:** https:\/\/www.cl.cam.ac.uk\/research\/nl\/bea2019st\/#data\r\n- **Paper:** https:\/\/www.aclweb.org\/anthology\/W19-4406\/\r\n- **Motivation:** This is a recent dataset (actually two in one) for grammatical error correction and is used for benchmarking in this field of NLP.\r\n\r\n### Checkbox\r\n\r\n- [x] Create the dataset script `\/datasets\/my_dataset\/my_dataset.py` using the template\r\n- [x] Fill the `_DESCRIPTION` and `_CITATION` variables\r\n- [x] Implement `_infos()`, `_split_generators()` and `_generate_examples()`\r\n- [x] Make sure that the `BUILDER_CONFIGS` class attribute is filled with the different configurations of the dataset and that the `BUILDER_CONFIG_CLASS` is specified if there is a custom config class.\r\n- [x] Generate the metadata file `dataset_infos.json` for all configurations\r\n- [x] Generate the dummy data `dummy_data.zip` files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [x] Add the dataset card `README.md` using the template : fill the tags and the various paragraphs\r\n- [x] Both tests for the real data and the dummy data 
pass.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1306\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1306\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1305","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1305\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1305\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1305\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1305","id":759446665,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDUxNzEx","number":1305,"title":"[README] Added Windows command to enable slow tests","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T13:29:04Z","updated_at":"2020-12-08T13:56:33Z","closed_at":"2020-12-08T13:56:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1305","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1305","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1305.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1305.patch","merged_at":"2020-12-08T13:56:32Z"},"body":"The Windows command to run slow tests has caused issues, so this adds a functional Windows command.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1305\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1305\/timeline","performed_via_github_app":null} 
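The checklist in the #1306 body above names the hooks every dataset script implements. A minimal, generic skeleton of that structure is sketched below; the class name, URL, and feature schema are placeholders (the checklist's `_infos()` corresponds to the template's `_info()` hook), and this is not the actual W&I + LOCNESS script.

```python
import json

import datasets

_URL = "https://example.com/my_dataset.jsonl"  # placeholder

class MyDataset(datasets.GeneratorBasedBuilder):
    """Generic sketch of the script structure referenced by the #1306 checklist."""

    def _info(self):
        return datasets.DatasetInfo(
            description="_DESCRIPTION goes here",
            features=datasets.Features({"text": datasets.Value("string")}),
            citation="_CITATION goes here",
        )

    def _split_generators(self, dl_manager):
        # Download (and extract, if archived) the source file, then hand its path
        # to _generate_examples via gen_kwargs.
        path = dl_manager.download_and_extract(_URL)
        return [datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepath": path})]

    def _generate_examples(self, filepath):
        # Yield (key, example) pairs matching the features declared in _info.
        with open(filepath, encoding="utf-8") as f:
            for idx, line in enumerate(f):
                yield idx, {"text": json.loads(line)["text"]}
```

The remaining checklist items (`dataset_infos.json`, `dummy_data.zip`, the `README.md` dataset card, and the real/dummy data tests) are produced and run around a script of this shape.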
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1304","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1304\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1304\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1304\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1304","id":759440841,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDQ2Nzcy","number":1304,"title":"adding eitb_parcc","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T13:20:54Z","updated_at":"2020-12-09T18:02:54Z","closed_at":"2020-12-09T18:02:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1304","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1304","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1304.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1304.patch","merged_at":"2020-12-09T18:02:03Z"},"body":"Adding EiTB-ParCC: Parallel Corpus of Comparable News\r\nhttp:\/\/opus.nlpl.eu\/EiTB-ParCC.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1304\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1304\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1303","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1303\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1303\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1303\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1303","id":759440484,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDQ2NDg0","number":1303,"title":"adding 
opus_openoffice","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T13:20:21Z","updated_at":"2020-12-10T09:37:10Z","closed_at":"2020-12-10T09:37:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1303","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1303","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1303.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1303.patch","merged_at":"2020-12-10T09:37:10Z"},"body":"Adding Opus OpenOffice: http:\/\/opus.nlpl.eu\/OpenOffice.php\r\n8 languages, 28 bitexts","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1303\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1303\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1302","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1302\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1302\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1302\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1302","id":759435740,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDQyNTA0","number":1302,"title":"Add Danish NER 
dataset","user":{"login":"ophelielacroix","id":28562991,"node_id":"MDQ6VXNlcjI4NTYyOTkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28562991?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ophelielacroix","html_url":"https:\/\/github.com\/ophelielacroix","followers_url":"https:\/\/api.github.com\/users\/ophelielacroix\/followers","following_url":"https:\/\/api.github.com\/users\/ophelielacroix\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ophelielacroix\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ophelielacroix\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ophelielacroix\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ophelielacroix\/orgs","repos_url":"https:\/\/api.github.com\/users\/ophelielacroix\/repos","events_url":"https:\/\/api.github.com\/users\/ophelielacroix\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ophelielacroix\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T13:13:54Z","updated_at":"2020-12-10T09:35:26Z","closed_at":"2020-12-10T09:35:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1302","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1302","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1302.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1302.patch","merged_at":"2020-12-10T09:35:26Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1302\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1302\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1301","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1301\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1301\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1301\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1301","id":759419945,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDI5MjAy","number":1301,"title":"arxiv dataset 
added","user":{"login":"tanmoyio","id":33005287,"node_id":"MDQ6VXNlcjMzMDA1Mjg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33005287?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tanmoyio","html_url":"https:\/\/github.com\/tanmoyio","followers_url":"https:\/\/api.github.com\/users\/tanmoyio\/followers","following_url":"https:\/\/api.github.com\/users\/tanmoyio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tanmoyio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tanmoyio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tanmoyio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tanmoyio\/orgs","repos_url":"https:\/\/api.github.com\/users\/tanmoyio\/repos","events_url":"https:\/\/api.github.com\/users\/tanmoyio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tanmoyio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-08T12:50:51Z","updated_at":"2020-12-09T18:05:16Z","closed_at":"2020-12-09T18:05:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1301","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1301","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1301.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1301.patch","merged_at":"2020-12-09T18:05:16Z"},"body":"**adding arXiv dataset**: arXiv dataset and metadata of 1.7M+ scholarly papers across STEM\r\ndataset link: https:\/\/www.kaggle.com\/Cornell-University\/arxiv","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1301\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1301\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1300","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1300\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1300\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1300\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1300","id":759418122,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDI3Njk1","number":1300,"title":"added 
dutch_social","user":{"login":"skyprince999","id":9033954,"node_id":"MDQ6VXNlcjkwMzM5NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9033954?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/skyprince999","html_url":"https:\/\/github.com\/skyprince999","followers_url":"https:\/\/api.github.com\/users\/skyprince999\/followers","following_url":"https:\/\/api.github.com\/users\/skyprince999\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/skyprince999\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/skyprince999\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/skyprince999\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/skyprince999\/orgs","repos_url":"https:\/\/api.github.com\/users\/skyprince999\/repos","events_url":"https:\/\/api.github.com\/users\/skyprince999\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/skyprince999\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T12:47:50Z","updated_at":"2020-12-08T16:09:05Z","closed_at":"2020-12-08T16:09:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1300","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1300","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1300.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1300.patch","merged_at":null},"body":"WIP \r\nAs some tests did not clear! \ud83d\udc4e\ud83c\udffc ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1300\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1300\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1299","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1299\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1299\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1299\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1299","id":759414566,"node_id":"MDU6SXNzdWU3NTk0MTQ1NjY=","number":1299,"title":"can't load \"german_legal_entity_recognition\" 
dataset","user":{"login":"nataly-obr","id":59837137,"node_id":"MDQ6VXNlcjU5ODM3MTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59837137?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nataly-obr","html_url":"https:\/\/github.com\/nataly-obr","followers_url":"https:\/\/api.github.com\/users\/nataly-obr\/followers","following_url":"https:\/\/api.github.com\/users\/nataly-obr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nataly-obr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nataly-obr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nataly-obr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nataly-obr\/orgs","repos_url":"https:\/\/api.github.com\/users\/nataly-obr\/repos","events_url":"https:\/\/api.github.com\/users\/nataly-obr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nataly-obr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-08T12:42:01Z","updated_at":"2020-12-16T16:03:13Z","closed_at":"2020-12-16T16:03:13Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"FileNotFoundError: Couldn't find file locally at german_legal_entity_recognition\/german_legal_entity_recognition.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/german_legal_entity_recognition\/german_legal_entity_recognition.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/german_legal_entity_recognition\/german_legal_entity_recognition.py\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1299\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1299\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1298","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1298\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1298\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1298\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1298","id":759412451,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDIyODQy","number":1298,"title":"Add OPUS Ted Talks 
2013","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T12:38:38Z","updated_at":"2020-12-16T16:57:50Z","closed_at":"2020-12-16T16:57:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1298","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1298","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1298.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1298.patch","merged_at":"2020-12-16T16:57:49Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1298\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1298\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1297","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1297\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1297\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1297\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1297","id":759404103,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0NDE1ODMx","number":1297,"title":"OPUS Ted Talks 
2013","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T12:25:39Z","updated_at":"2020-12-08T12:35:50Z","closed_at":"2020-12-08T12:35:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1297","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1297","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1297.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1297.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1297\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1297\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1296","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1296\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1296\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1296\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1296","id":759375292,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MzkxNzQ1","number":1296,"title":"The Snips Built In Intents 2016 
dataset.","user":{"login":"bduvenhage","id":8405335,"node_id":"MDQ6VXNlcjg0MDUzMzU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8405335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bduvenhage","html_url":"https:\/\/github.com\/bduvenhage","followers_url":"https:\/\/api.github.com\/users\/bduvenhage\/followers","following_url":"https:\/\/api.github.com\/users\/bduvenhage\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bduvenhage\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bduvenhage\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bduvenhage\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bduvenhage\/orgs","repos_url":"https:\/\/api.github.com\/users\/bduvenhage\/repos","events_url":"https:\/\/api.github.com\/users\/bduvenhage\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bduvenhage\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-08T11:40:10Z","updated_at":"2020-12-08T15:27:52Z","closed_at":"2020-12-08T15:27:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1296","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1296","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1296.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1296.patch","merged_at":null},"body":"This PR proposes to add the Snips.ai built in intents dataset. The first configuration added is for the intent labels only, but the dataset includes entity slots that may in future be added as alternate configurations.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1296\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1296\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1295","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1295\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1295\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1295\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1295","id":759375251,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MzkxNzE1","number":1295,"title":"add 
hrenwac_para","user":{"login":"IvanZidov","id":11391118,"node_id":"MDQ6VXNlcjExMzkxMTE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11391118?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/IvanZidov","html_url":"https:\/\/github.com\/IvanZidov","followers_url":"https:\/\/api.github.com\/users\/IvanZidov\/followers","following_url":"https:\/\/api.github.com\/users\/IvanZidov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/IvanZidov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/IvanZidov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/IvanZidov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/IvanZidov\/orgs","repos_url":"https:\/\/api.github.com\/users\/IvanZidov\/repos","events_url":"https:\/\/api.github.com\/users\/IvanZidov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/IvanZidov\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T11:40:06Z","updated_at":"2020-12-11T17:42:20Z","closed_at":"2020-12-11T17:42:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1295","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1295","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1295.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1295.patch","merged_at":"2020-12-11T17:42:20Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1295\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1295\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1294","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1294\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1294\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1294\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1294","id":759365246,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MzgzMjg5","number":1294,"title":"adding 
opus_euconst","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T11:24:16Z","updated_at":"2020-12-08T18:44:20Z","closed_at":"2020-12-08T18:41:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1294","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1294","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1294.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1294.patch","merged_at":"2020-12-08T18:41:22Z"},"body":"Adding EUconst, a parallel corpus collected from the European Constitution.\r\n21 languages, 210 bitexts","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1294\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1294\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1293","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1293\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1293\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1293\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1293","id":759360113,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0Mzc4OTQ0","number":1293,"title":"add 
hrenwac_para","user":{"login":"ivan-zidov","id":51969305,"node_id":"MDQ6VXNlcjUxOTY5MzA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/51969305?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ivan-zidov","html_url":"https:\/\/github.com\/ivan-zidov","followers_url":"https:\/\/api.github.com\/users\/ivan-zidov\/followers","following_url":"https:\/\/api.github.com\/users\/ivan-zidov\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ivan-zidov\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ivan-zidov\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ivan-zidov\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ivan-zidov\/orgs","repos_url":"https:\/\/api.github.com\/users\/ivan-zidov\/repos","events_url":"https:\/\/api.github.com\/users\/ivan-zidov\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ivan-zidov\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T11:16:41Z","updated_at":"2020-12-08T11:34:47Z","closed_at":"2020-12-08T11:34:38Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1293","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1293","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1293.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1293.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1293\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1293\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1292","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1292\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1292\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1292\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1292","id":759354627,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0Mzc0MzQ3","number":1292,"title":"arXiv dataset 
added","user":{"login":"tanmoyio","id":33005287,"node_id":"MDQ6VXNlcjMzMDA1Mjg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33005287?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tanmoyio","html_url":"https:\/\/github.com\/tanmoyio","followers_url":"https:\/\/api.github.com\/users\/tanmoyio\/followers","following_url":"https:\/\/api.github.com\/users\/tanmoyio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tanmoyio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tanmoyio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tanmoyio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tanmoyio\/orgs","repos_url":"https:\/\/api.github.com\/users\/tanmoyio\/repos","events_url":"https:\/\/api.github.com\/users\/tanmoyio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tanmoyio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T11:08:28Z","updated_at":"2020-12-08T14:02:13Z","closed_at":"2020-12-08T14:02:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1292","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1292","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1292.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1292.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1292\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1292\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1291","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1291\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1291\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1291\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1291","id":759352810,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MzcyNzk2","number":1291,"title":"adding pubmed_qa 
dataset","user":{"login":"tuner007","id":46425391,"node_id":"MDQ6VXNlcjQ2NDI1Mzkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46425391?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tuner007","html_url":"https:\/\/github.com\/tuner007","followers_url":"https:\/\/api.github.com\/users\/tuner007\/followers","following_url":"https:\/\/api.github.com\/users\/tuner007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tuner007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tuner007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tuner007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tuner007\/orgs","repos_url":"https:\/\/api.github.com\/users\/tuner007\/repos","events_url":"https:\/\/api.github.com\/users\/tuner007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tuner007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T11:05:44Z","updated_at":"2020-12-09T08:54:50Z","closed_at":"2020-12-09T08:54:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1291","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1291","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1291.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1291.patch","merged_at":"2020-12-09T08:54:50Z"},"body":"Pubmed QA dataset:\r\nPQA-L(abeled) 1k\r\nPQA-U(labeled) 61.2k\r\nPQA-A(rtifical labeled) 211.3k","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1291\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1291\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1290","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1290\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1290\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1290\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1290","id":759339989,"node_id":"MDU6SXNzdWU3NTkzMzk5ODk=","number":1290,"title":"imdb dataset cannot be 
downloaded","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-08T10:47:36Z","updated_at":"2020-12-24T17:38:09Z","closed_at":"2020-12-24T17:38:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"hi\r\nplease find error below getting imdb train spli:\r\nthanks\r\n\r\n`\r\ndatasets.load_dataset>>> datasets.load_dataset(\"imdb\", split=\"train\")`\r\n\r\n\r\nerrors\r\n\r\n\r\n```\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home_1\/datasets\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home_1\/datasets\r\nDownloading and preparing dataset imdb\/plain_text (download: 80.23 MiB, generated: 127.06 MiB, post-processed: Unknown size, total: 207.28 MiB) to \/idiap\/temp\/rkarimi\/cache_home_1\/datasets\/imdb\/plain_text\/1.0.0\/90099cb476936b753383ba2ae6ab2eae419b2e87f71cd5189cb9c8e5814d12a3...\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home_1\/datasets\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home_1\/datasets\/downloads\r\nTraceback (most recent call last): \r\n File \"\", line 1, in \r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 558, in _download_and_prepare\r\n verify_splits(self.info.splits, split_dict)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/info_utils.py\", line 73, in verify_splits\r\n raise NonMatchingSplitsSizesError(str(bad_splits))\r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='unsupervised', num_bytes=67125548, num_examples=50000, dataset_name='imdb'), 'recorded': SplitInfo(name='unsupervised', num_bytes=7486451, num_examples=5628, dataset_name='imdb')}]\r\n\r\n\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1290\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1290\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1289","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1289\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1289\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1289\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1289","id":759333684,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MzU2ODUw","number":1289,"title":"Jigsaw toxicity classification dataset added","user":{"login":"taihim","id":13764071,"node_id":"MDQ6VXNlcjEzNzY0MDcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13764071?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/taihim","html_url":"https:\/\/github.com\/taihim","followers_url":"https:\/\/api.github.com\/users\/taihim\/followers","following_url":"https:\/\/api.github.com\/users\/taihim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/taihim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/taihim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/taihim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/taihim\/orgs","repos_url":"https:\/\/api.github.com\/users\/taihim\/repos","events_url":"https:\/\/api.github.com\/users\/taihim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/taihim\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T10:38:51Z","updated_at":"2020-12-08T15:17:48Z","closed_at":"2020-12-08T15:17:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1289","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1289","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1289.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1289.patch","merged_at":null},"body":"The dataset requires manually downloading data from Kaggle.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1289\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1289\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1288","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1288\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1288\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1288\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1288","id":759309457,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MzM2Mzgz","number":1288,"title":"Add CodeSearchNet corpus 
dataset","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T10:07:50Z","updated_at":"2020-12-09T17:05:28Z","closed_at":"2020-12-09T17:05:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1288","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1288","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1288.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1288.patch","merged_at":"2020-12-09T17:05:27Z"},"body":"This PR adds the CodeSearchNet corpus proxy dataset for semantic code search: https:\/\/github.com\/github\/CodeSearchNet\r\nI have had a few issues, mentioned below. Would appreciate some help on how to solve them.\r\n\r\n## Issues generating dataset card\r\nIs there something wrong with my declaration of the dataset features ?\r\n```\r\nfeatures=datasets.Features(\r\n {\r\n \"repository_name\": datasets.Value(\"string\"),\r\n \"func_path_in_repository\": datasets.Value(\"string\"),\r\n \"func_name\": datasets.Value(\"string\"),\r\n \"whole_func_string\": datasets.Value(\"string\"),\r\n \"language\": datasets.Value(\"string\"),\r\n \"func_code_string\": datasets.Value(\"string\"),\r\n \"func_code_tokens\": datasets.Sequence(datasets.Value(\"string\")),\r\n \"func_documentation_string\": datasets.Value(\"string\"),\r\n \"func_documentation_tokens\": datasets.Sequence(datasets.Value(\"string\")),\r\n \"split_name\": datasets.Value(\"string\"),\r\n \"func_code_url\": datasets.Value(\"string\"),\r\n # TODO - add licensing info in the examples\r\n }\r\n),\r\n\r\n```\r\nWhen running the streamlite app for tagging the dataset on my machine, I get the following error :\r\n![image](https:\/\/user-images.githubusercontent.com\/33657802\/101469132-9ed12c80-3944-11eb-94ff-2d9c1d0ea080.png)\r\n\r\n\r\n## Issues with dummy data\r\nDue to the unusual structure of the data, I have been unable to generate dummy data automatically.\r\nI tried to generate it manually, but pytests fail when using the manually-generated dummy data ! 
Pytests work fine when using the real data.\r\n```\r\n============================================================================================== test session starts ==============================================================================================\r\nplatform linux -- Python 3.7.9, pytest-6.1.2, py-1.9.0, pluggy-0.13.1\r\nplugins: xdist-2.1.0, forked-1.3.0\r\ncollected 1 item\r\n\r\ntests\/test_dataset_common.py F [100%]\r\n\r\n=================================================================================================== FAILURES ====================================================================================================\r\n________________________________________________________________________ LocalDatasetTest.test_load_dataset_all_configs_code_search_net _________________________________________________________________________\r\nself = , dataset_name = 'code_search_net'\r\n\r\n @slow\r\n def test_load_dataset_all_configs(self, dataset_name):\r\n configs = self.dataset_tester.load_all_configs(dataset_name, is_local=True)\r\n> self.dataset_tester.check_load_dataset(dataset_name, configs, is_local=True)\r\n\r\ntests\/test_dataset_common.py:237:\r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\r\ntests\/test_dataset_common.py:198: in check_load_dataset\r\n self.parent.assertTrue(len(dataset[split]) > 0)\r\nE AssertionError: False is not true\r\n--------------------------------------------------------------------------------------------- Captured stdout call ----------------------------------------------------------------------------------------------\r\nDownloading and preparing dataset code_search_net\/all (download: 1.00 MiB, generated: 1.00 MiB, post-processed: Unknown size, total: 2.00 MiB) to \/tmp\/tmppx78sj24\/code_search_net\/all\/1.0.0...\r\nDataset code_search_net downloaded and prepared to \/tmp\/tmppx78sj24\/code_search_net\/all\/1.0.0. Subsequent calls will reuse this data.\r\n--------------------------------------------------------------------------------------------- Captured stderr call ----------------------------------------------------------------------------------------------\r\n... (irrelevant info - Deprecation warnings)\r\n============================================================================================ short test summary info ============================================================================================\r\nFAILED tests\/test_dataset_common.py::LocalDatasetTest::test_load_dataset_all_configs_code_search_net - AssertionError: False is not true\r\n========================================================================================= 1 failed, 4 warnings in 3.00s ========================================================================================\r\n```\r\n\r\n\r\n## Note : Data structure in S3\r\nThe data is stored on S3, and organized by programming languages.\r\nIt is stored in the following repository structure:\r\n```\r\n.\r\n\u251c\u2500\u2500 # e.g. 
python\r\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 final\r\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 jsonl\r\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 test\r\n\u2502\u00a0\u00a0 \u2502\u00a0\u00a0 \u2514\u2500\u2500 _test_0.jsonl.gz\r\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 train\r\n\u2502\u00a0\u00a0 \u2502\u00a0\u00a0 \u251c\u2500\u2500 _train_0.jsonl.gz\r\n\u2502\u00a0\u00a0 \u2502\u00a0\u00a0 \u251c\u2500\u2500 _train_1.jsonl.gz\r\n\u2502\u00a0\u00a0 \u2502\u00a0\u00a0 \u251c\u2500\u2500 ...\r\n\u2502\u00a0\u00a0 \u2502\u00a0\u00a0 \u2514\u2500\u2500 _train_n.jsonl.gz\r\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 valid\r\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 _valid_0.jsonl.gz\r\n\u251c\u2500\u2500 _dedupe_definitions_v2.pkl\r\n\u2514\u2500\u2500 _licenses.pkl\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1288\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1288\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1287","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1287\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1287\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1287\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1287","id":759300992,"node_id":"MDU6SXNzdWU3NTkzMDA5OTI=","number":1287,"title":"'iwslt2017-ro-nl', cannot be downloaded ","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-08T09:56:55Z","updated_at":"2020-12-15T18:24:54Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am trying \r\n`>>> datasets.load_dataset(\"iwslt2017\", 'iwslt2017-ro-nl', split=\"train\")`\r\n\r\ngetting this error thank you for your help\r\n```\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home_1\/datasets\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home_1\/datasets\r\nDownloading and preparing dataset 
iwsl_t217\/iwslt2017-ro-nl (download: 314.07 MiB, generated: 39.92 MiB, post-processed: Unknown size, total: 354.00 MiB) to \/idiap\/temp\/rkarimi\/cache_home_1\/datasets\/iwsl_t217\/iwslt2017-ro-nl\/1.0.0\/cca6935a0851a8ceac1202a62c958738bdfa23c57a51bc52ac1c5ebd2aa172cd...\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home_1\/datasets\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home_1\/datasets\/downloads\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 531, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \" \/idiap\/home\/rkarimi\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/iwslt2017\/cca6935a0851a8ceac1202a62c958738bdfa23c57a51bc52ac1c5ebd2aa172cd\/iwslt2017.py\", line 118, in _split_generators\r\n dl_dir = dl_manager.download_and_extract(MULTI_URL)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 254, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 179, in download\r\n num_proc=download_config.num_proc,\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 216, in map_nested\r\n return function(data_struct)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 477, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/wit3.fbk.eu\/archive\/2017-01-trnmted\/\/texts\/DeEnItNlRo\/DeEnItNlRo\/DeEnItNlRo-DeEnItNlRo.tgz\r\n\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1287\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1287\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1286","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1286\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1286\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1286\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1286","id":759291509,"node_id":"MDU6SXNzdWU3NTkyOTE1MDk=","number":1286,"title":"[libprotobuf FATAL 
\/sentencepiece\/src\/..\/third_party\/protobuf-lite\/google\/protobuf\/repeated_field.h:1505] CHECK failed: (index) >= (0): terminate called after throwing an instance of 'google::protobuf::FatalException' what(): CHECK failed: (index) >= (0): Aborted","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-12-08T09:44:15Z","updated_at":"2020-12-12T19:36:22Z","closed_at":"2020-12-12T16:22:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am getting this error when evaluating on wmt16-ro-en using finetune_trainer.py of huggingface repo. thank for your help\r\n\r\n{'epoch': 20.0} \r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 20\/20 [00:16<00:00, 1.22it\/s]\r\n12\/08\/2020 10:41:19 - INFO - seq2seq.trainers.trainer - Saving model checkpoint to outputs\/experiment\/joint\/finetune\/lr-2e-5\r\n12\/08\/2020 10:41:24 - INFO - __main__ - {'wmt16-en-ro': Dataset(features: {'src_texts': Value(dtype='string', id=None), 'task': Value(dtype='string', id=None), 'tgt_texts': Value(dtype='string', id=None)}, num_rows: 1998), 'qnli': Dataset(features: {'src_texts': Value(dtype='string', id=None), 'task': Value(dtype='string', id=None), 'tgt_texts': Value(dtype='string', id=None)}, num_rows: 5462), 'scitail': Dataset(features: {'src_texts': Value(dtype='string', id=None), 'task': Value(dtype='string', id=None), 'tgt_texts': Value(dtype='string', id=None)}, num_rows: 1303)}\r\n12\/08\/2020 10:41:24 - INFO - __main__ - *** Evaluate ***\r\n12\/08\/2020 10:41:24 - INFO - seq2seq.utils.utils - using task specific params for wmt16-en-ro: {'max_length': 300, 'num_beams': 4}\r\n12\/08\/2020 10:41:24 - INFO - seq2seq.trainers.trainer - ***** Running Evaluation *****\r\n12\/08\/2020 10:41:24 - INFO - seq2seq.trainers.trainer - Num examples = 1998\r\n12\/08\/2020 10:41:24 - 
INFO - seq2seq.trainers.trainer - Batch size = 64\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 32\/32 [00:37<00:00, 1.19s\/it][libprotobuf FATAL \/sentencepiece\/src\/..\/third_party\/protobuf-lite\/google\/protobuf\/repeated_field.h:1505] CHECK failed: (index) >= (0): \r\nterminate called after throwing an instance of 'google::protobuf::FatalException'\r\n what(): CHECK failed: (index) >= (0): \r\nAborted\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1286\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1286\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1285","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1285\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1285\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1285\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1285","id":759278758,"node_id":"MDU6SXNzdWU3NTkyNzg3NTg=","number":1285,"title":"boolq does not work ","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-08T09:28:47Z","updated_at":"2020-12-08T09:47:10Z","closed_at":"2020-12-08T09:47:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am getting this error when trying to load boolq, thanks for your help\r\n\r\nts_boolq_default_0.1.0_2987db1f15deaa19500ae24de560eabeaf1f8ef51df88c0470beeec72943bf11.lock\r\nTraceback (most recent call last):\r\n File 
\"finetune_t5_trainer.py\", line 274, in \r\n main()\r\n File \"finetune_t5_trainer.py\", line 147, in main\r\n for task in data_args.tasks]\r\n File \"finetune_t5_trainer.py\", line 147, in \r\n for task in data_args.tasks]\r\n File \"\/remote\/idiap.svm\/user.active\/rkarimi\/dev\/ruse\/seq2seq\/tasks\/tasks.py\", line 58, in get_dataset\r\n dataset = self.load_dataset(split=split)\r\n File \"\/remote\/idiap.svm\/user.active\/rkarimi\/dev\/ruse\/seq2seq\/tasks\/tasks.py\", line 54, in load_dataset\r\n return datasets.load_dataset(self.task.name, split=split)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 531, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \" \/idiap\/home\/rkarimi\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/boolq\/2987db1f15deaa19500ae24de560eabeaf1f8ef51df88c0470beeec72943bf11\/boolq.py\", line 74, in _split_generators\r\n downloaded_files = dl_manager.download_custom(urls_to_download, tf.io.gfile.copy)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 149, in download_custom\r\n custom_download(url, path)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/tensorflow\/python\/lib\/io\/file_io.py\", line 516, in copy_v2\r\n compat.path_to_bytes(src), compat.path_to_bytes(dst), overwrite)\r\ntensorflow.python.framework.errors_impl.AlreadyExistsError: file already exists\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1285\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1285\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1284","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1284\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1284\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1284\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1284","id":759269920,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MzAzNDk0","number":1284,"title":"Update coqa dataset 
url","user":{"login":"ojasaar","id":73708394,"node_id":"MDQ6VXNlcjczNzA4Mzk0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73708394?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ojasaar","html_url":"https:\/\/github.com\/ojasaar","followers_url":"https:\/\/api.github.com\/users\/ojasaar\/followers","following_url":"https:\/\/api.github.com\/users\/ojasaar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ojasaar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ojasaar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ojasaar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ojasaar\/orgs","repos_url":"https:\/\/api.github.com\/users\/ojasaar\/repos","events_url":"https:\/\/api.github.com\/users\/ojasaar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ojasaar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T09:16:38Z","updated_at":"2020-12-08T18:19:09Z","closed_at":"2020-12-08T18:19:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1284","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1284","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1284.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1284.patch","merged_at":"2020-12-08T18:19:09Z"},"body":"`datasets.stanford.edu` is invalid.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1284\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1284\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1283","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1283\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1283\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1283\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1283","id":759251457,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0Mjg4MDg2","number":1283,"title":"Add dutch book review 
dataset","user":{"login":"benjaminvdb","id":8875786,"node_id":"MDQ6VXNlcjg4NzU3ODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8875786?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/benjaminvdb","html_url":"https:\/\/github.com\/benjaminvdb","followers_url":"https:\/\/api.github.com\/users\/benjaminvdb\/followers","following_url":"https:\/\/api.github.com\/users\/benjaminvdb\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/benjaminvdb\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/benjaminvdb\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/benjaminvdb\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/benjaminvdb\/orgs","repos_url":"https:\/\/api.github.com\/users\/benjaminvdb\/repos","events_url":"https:\/\/api.github.com\/users\/benjaminvdb\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/benjaminvdb\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T08:50:48Z","updated_at":"2020-12-09T20:21:58Z","closed_at":"2020-12-09T17:25:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1283","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1283","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1283.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1283.patch","merged_at":"2020-12-09T17:25:25Z"},"body":"- Name: Dutch Book Review Dataset (DBRD)\r\n- Description: The DBRD (pronounced dee-bird) dataset contains over 110k book reviews along with associated binary sentiment polarity labels and is intended as a benchmark for sentiment classification in Dutch.\r\n- Paper: https:\/\/arxiv.org\/abs\/1910.00896\r\n- Data: https:\/\/github.com\/benjaminvdb\/DBRD\r\n- Motivation: A large (real-life) dataset of Dutch book reviews and sentiment polarity (positive\/negative), based on the associated rating.\r\n\r\nChecks\r\n- [x] Create the dataset script \/datasets\/dbrd\/dbrd.py using the template\r\n- [x] Fill the _DESCRIPTION and _CITATION variables\r\n- [x] Implement _info(), _split_generators() and _generate_examples()\r\n- [x] Make sure that the BUILDER_CONFIGS class attribute is filled with the different configurations of the dataset and that the BUILDER_CONFIG_CLASS is specified if there is a custom config class.\r\n- [x] Generate the metadata file dataset_infos.json for all configurations\r\n- [x] Generate the dummy data dummy_data.zip files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [x] Add the dataset card README.md using the template : fill the tags and the various paragraphs\r\n- [x] Both tests for the real data and the dummy data pass.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1283\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1283\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1282","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1282\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1282\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1282\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1282","id":759208335,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MjQ4NzI5","number":1282,"title":"add thaiqa_squad","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T08:14:38Z","updated_at":"2020-12-08T18:36:18Z","closed_at":"2020-12-08T18:36:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1282","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1282","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1282.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1282.patch","merged_at":"2020-12-08T18:36:18Z"},"body":"Example format is a little different from SQuAD since `thaiqa` always have one answer per question so I added a check to convert answers to lists if they are not already one to future-proof additional questions that might have multiple answers.\r\n\r\n`thaiqa_squad` is an open-domain, extractive question answering dataset (4,000 questions in `train` and 74 questions in `dev`) in [SQuAD](https:\/\/rajpurkar.github.io\/SQuAD-explorer\/) format, originally created by [NECTEC](https:\/\/www.nectec.or.th\/en\/) from Wikipedia articles and adapted to [SQuAD](https:\/\/rajpurkar.github.io\/SQuAD-explorer\/) format by [PyThaiNLP](https:\/\/github.com\/PyThaiNLP\/).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1282\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1282\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1281","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1281\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1281\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1281\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1281","id":759203317,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MjQ0MTA1","number":1281,"title":"adding hybrid_qa","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-08T08:10:19Z","updated_at":"2020-12-08T18:09:28Z","closed_at":"2020-12-08T18:07:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1281","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1281","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1281.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1281.patch","merged_at":"2020-12-08T18:07:00Z"},"body":"Adding HybridQA: A Dataset of Multi-Hop Question Answering over Tabular and Textual Data\r\nhttps:\/\/github.com\/wenhuchen\/HybridQA","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1281\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1281\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1280","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1280\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1280\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1280\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1280","id":759151028,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MTk2MDc0","number":1280,"title":"disaster response messages 
dataset","user":{"login":"darshan-gandhi","id":44197177,"node_id":"MDQ6VXNlcjQ0MTk3MTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44197177?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/darshan-gandhi","html_url":"https:\/\/github.com\/darshan-gandhi","followers_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/followers","following_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/orgs","repos_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/repos","events_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/darshan-gandhi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-08T07:27:16Z","updated_at":"2020-12-09T16:21:57Z","closed_at":"2020-12-09T16:21:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1280","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1280","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1280.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1280.patch","merged_at":"2020-12-09T16:21:57Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1280\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1280\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1279","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1279\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1279\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1279\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1279","id":759108726,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MTU4OTY5","number":1279,"title":"added 
para_pat","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-08T06:28:47Z","updated_at":"2020-12-14T13:41:17Z","closed_at":"2020-12-14T13:41:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1279","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1279","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1279.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1279.patch","merged_at":"2020-12-14T13:41:17Z"},"body":"Dataset link : https:\/\/figshare.com\/articles\/ParaPat_The_Multi-Million_Sentences_Parallel_Corpus_of_Patents_Abstracts\/12627632\r\nWorking on README.md currently","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1279\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1279\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1278","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1278\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1278\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1278\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1278","id":758988465,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MDYwNDY5","number":1278,"title":"Craigslist 
bargains","user":{"login":"ZacharySBrown","id":7950786,"node_id":"MDQ6VXNlcjc5NTA3ODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7950786?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZacharySBrown","html_url":"https:\/\/github.com\/ZacharySBrown","followers_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/followers","following_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/repos","events_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZacharySBrown\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-08T01:45:55Z","updated_at":"2020-12-09T00:46:15Z","closed_at":"2020-12-09T00:46:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1278","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1278","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1278.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1278.patch","merged_at":null},"body":"`craigslist_bargains` dataset from [here](https:\/\/worksheets.codalab.org\/worksheets\/0x453913e76b65495d8b9730d41c7e0a0c\/)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1278\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1278\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1276","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1276\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1276\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1276\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1276","id":758965936,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MDQyODYy","number":1276,"title":"add One Million Posts 
Corpus","user":{"login":"aseifert","id":4944799,"node_id":"MDQ6VXNlcjQ5NDQ3OTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4944799?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aseifert","html_url":"https:\/\/github.com\/aseifert","followers_url":"https:\/\/api.github.com\/users\/aseifert\/followers","following_url":"https:\/\/api.github.com\/users\/aseifert\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aseifert\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aseifert\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aseifert\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aseifert\/orgs","repos_url":"https:\/\/api.github.com\/users\/aseifert\/repos","events_url":"https:\/\/api.github.com\/users\/aseifert\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aseifert\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T00:50:08Z","updated_at":"2020-12-11T18:28:18Z","closed_at":"2020-12-11T18:28:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1276","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1276","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1276.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1276.patch","merged_at":"2020-12-11T18:28:18Z"},"body":"- **Name:** One Million Posts Corpus\r\n- **Description:** The \u201cOne Million Posts\u201d corpus is an annotated data set consisting of user comments posted to an Austrian newspaper website (in German language).\r\n- **Paper:** https:\/\/dl.acm.org\/doi\/10.1145\/3077136.3080711\r\n- **Data:** https:\/\/github.com\/OFAI\/million-post-corpus\r\n- **Motivation:** Big German (real-life) dataset containing different annotations around forum moderation with expert annotations.\r\n\r\n### Checkbox\r\n\r\n- [X] Create the dataset script `\/datasets\/my_dataset\/my_dataset.py` using the template\r\n- [X] Fill the `_DESCRIPTION` and `_CITATION` variables\r\n- [X] Implement `_infos()`, `_split_generators()` and `_generate_examples()`\r\n- [X] Make sure that the `BUILDER_CONFIGS` class attribute is filled with the different configurations of the dataset and that the `BUILDER_CONFIG_CLASS` is specified if there is a custom config class.\r\n- [X] Generate the metadata file `dataset_infos.json` for all configurations\r\n- [X] Generate the dummy data `dummy_data.zip` files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [X] Add the dataset card `README.md` using the template : fill the tags and the various paragraphs\r\n- [X] Both tests for the real data and the dummy data pass.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1276\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1276\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1275","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1275\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1275\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1275\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1275","id":758958066,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MDM2NjIw","number":1275,"title":"Yoruba GV NER added","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-08T00:31:38Z","updated_at":"2020-12-08T23:25:28Z","closed_at":"2020-12-08T23:25:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1275","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1275","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1275.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1275.patch","merged_at":null},"body":"I just added Yoruba GV NER dataset from this paper https:\/\/www.aclweb.org\/anthology\/2020.lrec-1.335\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1275\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1275\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1274","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1274\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1274\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1274\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1274","id":758943174,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MDI0MTQx","number":1274,"title":"oclar-dataset","user":{"login":"alaameloh","id":26907161,"node_id":"MDQ6VXNlcjI2OTA3MTYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26907161?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alaameloh","html_url":"https:\/\/github.com\/alaameloh","followers_url":"https:\/\/api.github.com\/users\/alaameloh\/followers","following_url":"https:\/\/api.github.com\/users\/alaameloh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alaameloh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alaameloh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alaameloh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alaameloh\/orgs","repos_url":"https:\/\/api.github.com\/users\/alaameloh\/repos","events_url":"https:\/\/api.github.com\/users\/alaameloh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alaameloh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T23:56:45Z","updated_at":"2020-12-09T15:36:08Z","closed_at":"2020-12-09T15:36:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1274","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1274","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1274.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1274.patch","merged_at":"2020-12-09T15:36:08Z"},"body":"Opinion Corpus for Lebanese Arabic Reviews (OCLAR) corpus is utilizable for Arabic sentiment classification on reviews, including hotels, restaurants, shops, and others. 
: [homepage](http:\/\/archive.ics.uci.edu\/ml\/datasets\/Opinion+Corpus+for+Lebanese+Arabic+Reviews+%28OCLAR%29#)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1274\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1274\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1273","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1273\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1273\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1273\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1273","id":758935768,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MDE4MjQ2","number":1273,"title":"Created wiki_movies dataset.","user":{"login":"aclifton314","id":53267795,"node_id":"MDQ6VXNlcjUzMjY3Nzk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53267795?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aclifton314","html_url":"https:\/\/github.com\/aclifton314","followers_url":"https:\/\/api.github.com\/users\/aclifton314\/followers","following_url":"https:\/\/api.github.com\/users\/aclifton314\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aclifton314\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aclifton314\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aclifton314\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aclifton314\/orgs","repos_url":"https:\/\/api.github.com\/users\/aclifton314\/repos","events_url":"https:\/\/api.github.com\/users\/aclifton314\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aclifton314\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-07T23:38:54Z","updated_at":"2020-12-14T13:56:49Z","closed_at":"2020-12-14T13:56:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1273","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1273","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1273.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1273.patch","merged_at":null},"body":"First PR (ever). 
Hopefully this movies dataset is useful to others!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1273\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1273\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1272","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1272\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1272\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1272\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1272","id":758924960,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MDA5MTk0","number":1272,"title":"Psc","user":{"login":"abecadel","id":1654113,"node_id":"MDQ6VXNlcjE2NTQxMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1654113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abecadel","html_url":"https:\/\/github.com\/abecadel","followers_url":"https:\/\/api.github.com\/users\/abecadel\/followers","following_url":"https:\/\/api.github.com\/users\/abecadel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abecadel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abecadel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abecadel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abecadel\/orgs","repos_url":"https:\/\/api.github.com\/users\/abecadel\/repos","events_url":"https:\/\/api.github.com\/users\/abecadel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abecadel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T23:19:36Z","updated_at":"2020-12-07T23:48:05Z","closed_at":"2020-12-07T23:47:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1272","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1272","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1272.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1272.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1272\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1272\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1271","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1271\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1271\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1271\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1271","id":758924203,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MDA4NTg4","number":1271,"title":"SMS Spam 
Dataset","user":{"login":"czabo","id":75574105,"node_id":"MDQ6VXNlcjc1NTc0MTA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75574105?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/czabo","html_url":"https:\/\/github.com\/czabo","followers_url":"https:\/\/api.github.com\/users\/czabo\/followers","following_url":"https:\/\/api.github.com\/users\/czabo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/czabo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/czabo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/czabo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/czabo\/orgs","repos_url":"https:\/\/api.github.com\/users\/czabo\/repos","events_url":"https:\/\/api.github.com\/users\/czabo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/czabo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T23:18:06Z","updated_at":"2020-12-08T17:42:19Z","closed_at":"2020-12-08T17:42:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1271","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1271","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1271.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1271.patch","merged_at":"2020-12-08T17:42:19Z"},"body":"Hi :) I added this [SMS Spam Dataset](http:\/\/archive.ics.uci.edu\/ml\/datasets\/SMS+Spam+Collection)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1271\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1271\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1270","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1270\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1270\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1270\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1270","id":758917216,"node_id":"MDExOlB1bGxSZXF1ZXN0NTM0MDAyODIz","number":1270,"title":"add DFKI SmartData 
Corpus","user":{"login":"aseifert","id":4944799,"node_id":"MDQ6VXNlcjQ5NDQ3OTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4944799?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aseifert","html_url":"https:\/\/github.com\/aseifert","followers_url":"https:\/\/api.github.com\/users\/aseifert\/followers","following_url":"https:\/\/api.github.com\/users\/aseifert\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aseifert\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aseifert\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aseifert\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aseifert\/orgs","repos_url":"https:\/\/api.github.com\/users\/aseifert\/repos","events_url":"https:\/\/api.github.com\/users\/aseifert\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aseifert\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T23:03:48Z","updated_at":"2020-12-08T17:41:23Z","closed_at":"2020-12-08T17:41:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1270","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1270","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1270.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1270.patch","merged_at":"2020-12-08T17:41:23Z"},"body":"- **Name:** DFKI SmartData Corpus\r\n- **Description:** DFKI SmartData Corpus is a dataset of 2598 German-language documents which has been annotated with fine-grained geo-entities, such as streets, stops and routes, as well as standard named entity types.\r\n- **Paper:** https:\/\/www.dfki.de\/fileadmin\/user_upload\/import\/9427_lrec_smartdata_corpus.pdf\r\n- **Data:** https:\/\/github.com\/DFKI-NLP\/smartdata-corpus\r\n- **Motivation:** Contains fine-grained NER labels for German.\r\n\r\n### Checkbox\r\n\r\n- [X] Create the dataset script `\/datasets\/my_dataset\/my_dataset.py` using the template\r\n- [X] Fill the `_DESCRIPTION` and `_CITATION` variables\r\n- [X] Implement `_infos()`, `_split_generators()` and `_generate_examples()`\r\n- [X] Make sure that the `BUILDER_CONFIGS` class attribute is filled with the different configurations of the dataset and that the `BUILDER_CONFIG_CLASS` is specified if there is a custom config class.\r\n- [X] Generate the metadata file `dataset_infos.json` for all configurations\r\n- [X] Generate the dummy data `dummy_data.zip` files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [X] Add the dataset card `README.md` using the template : fill the tags and the various paragraphs\r\n- [X] Both tests for the real data and the dummy data pass.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1270\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1270\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1269","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1269\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1269\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1269\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1269","id":758886174,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzOTc3MTE2","number":1269,"title":"Adding OneStopEnglish corpus dataset","user":{"login":"purvimisal","id":22298787,"node_id":"MDQ6VXNlcjIyMjk4Nzg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22298787?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/purvimisal","html_url":"https:\/\/github.com\/purvimisal","followers_url":"https:\/\/api.github.com\/users\/purvimisal\/followers","following_url":"https:\/\/api.github.com\/users\/purvimisal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/purvimisal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/purvimisal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/purvimisal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/purvimisal\/orgs","repos_url":"https:\/\/api.github.com\/users\/purvimisal\/repos","events_url":"https:\/\/api.github.com\/users\/purvimisal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/purvimisal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T22:05:11Z","updated_at":"2020-12-09T18:43:38Z","closed_at":"2020-12-09T15:33:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1269","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1269","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1269.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1269.patch","merged_at":"2020-12-09T15:33:53Z"},"body":"This PR adds OneStopEnglish Corpus containing texts classified into reading levels (elementary, intermediate, advance) for automatic readability assessment and text simplification. 
\r\n\r\nLink to the paper: https:\/\/www.aclweb.org\/anthology\/W18-0535.pdf ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1269\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1269\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1268","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1268\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1268\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1268\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1268","id":758871252,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzOTY0OTQ4","number":1268,"title":"new pr for Turkish NER","user":{"login":"merveenoyan","id":53175384,"node_id":"MDQ6VXNlcjUzMTc1Mzg0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53175384?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/merveenoyan","html_url":"https:\/\/github.com\/merveenoyan","followers_url":"https:\/\/api.github.com\/users\/merveenoyan\/followers","following_url":"https:\/\/api.github.com\/users\/merveenoyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/merveenoyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/merveenoyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/merveenoyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/merveenoyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/merveenoyan\/repos","events_url":"https:\/\/api.github.com\/users\/merveenoyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/merveenoyan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-07T21:40:26Z","updated_at":"2020-12-09T13:45:05Z","closed_at":"2020-12-09T13:45:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1268","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1268","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1268.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1268.patch","merged_at":"2020-12-09T13:45:05Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1268\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1268\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1267","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1267\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1267\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1267\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1267","id":758826568,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzOTMwNzU2","number":1267,"title":"Has part","user":{"login":"jeromeku","id":2455711,"node_id":"MDQ6VXNlcjI0NTU3MTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2455711?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jeromeku","html_url":"https:\/\/github.com\/jeromeku","followers_url":"https:\/\/api.github.com\/users\/jeromeku\/followers","following_url":"https:\/\/api.github.com\/users\/jeromeku\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jeromeku\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jeromeku\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jeromeku\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jeromeku\/orgs","repos_url":"https:\/\/api.github.com\/users\/jeromeku\/repos","events_url":"https:\/\/api.github.com\/users\/jeromeku\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jeromeku\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T20:32:03Z","updated_at":"2020-12-11T18:25:42Z","closed_at":"2020-12-11T18:25:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1267","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1267","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1267.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1267.patch","merged_at":"2020-12-11T18:25:42Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1267\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1267\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1266","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1266\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1266\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1266\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1266","id":758704178,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzODMyNTQ1","number":1266,"title":"removing unzipped hansards dummy 
data","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T17:31:16Z","updated_at":"2020-12-07T17:32:29Z","closed_at":"2020-12-07T17:32:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1266","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1266","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1266.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1266.patch","merged_at":"2020-12-07T17:32:28Z"},"body":"which were added by mistake","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1266\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1266\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1265","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1265\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1265\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1265\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1265","id":758687223,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzODE4NjY0","number":1265,"title":"Add CovidQA 
dataset","user":{"login":"olinguyen","id":4341867,"node_id":"MDQ6VXNlcjQzNDE4Njc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4341867?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/olinguyen","html_url":"https:\/\/github.com\/olinguyen","followers_url":"https:\/\/api.github.com\/users\/olinguyen\/followers","following_url":"https:\/\/api.github.com\/users\/olinguyen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/olinguyen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/olinguyen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/olinguyen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/olinguyen\/orgs","repos_url":"https:\/\/api.github.com\/users\/olinguyen\/repos","events_url":"https:\/\/api.github.com\/users\/olinguyen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/olinguyen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-07T17:06:51Z","updated_at":"2020-12-08T17:02:26Z","closed_at":"2020-12-08T17:02:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1265","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1265","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1265.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1265.patch","merged_at":"2020-12-08T17:02:26Z"},"body":"This PR adds CovidQA, a question answering dataset specifically designed for COVID-19, built by hand from knowledge gathered from Kaggle\u2019s COVID-19 Open Research Dataset Challenge.\r\n\r\nLink to the paper: https:\/\/arxiv.org\/pdf\/2004.11339.pdf\r\nLink to the homepage: https:\/\/covidqa.ai","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1265\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1265\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1264","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1264\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1264\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1264\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1264","id":758686474,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzODE4MDM2","number":1264,"title":"enriched webnlg dataset 
rebase","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T17:05:45Z","updated_at":"2020-12-09T17:00:29Z","closed_at":"2020-12-09T17:00:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1264","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1264","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1264.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1264.patch","merged_at":"2020-12-09T17:00:27Z"},"body":"Rebase of #1206 !","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1264\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1264\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1263","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1263\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1263\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1263\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1263","id":758663787,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNzk5NzU5","number":1263,"title":"Added kannada news headlines classification dataset. 
","user":{"login":"vrindaprabhu","id":16264631,"node_id":"MDQ6VXNlcjE2MjY0NjMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16264631?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vrindaprabhu","html_url":"https:\/\/github.com\/vrindaprabhu","followers_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/followers","following_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/orgs","repos_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/repos","events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T16:35:37Z","updated_at":"2020-12-10T14:30:55Z","closed_at":"2020-12-09T18:01:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1263","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1263","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1263.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1263.patch","merged_at":"2020-12-09T18:01:31Z"},"body":"Manual Download of a kaggle dataset. Mostly followed process as ms_terms.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1263\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1263\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1262","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1262\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1262\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1262\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1262","id":758637124,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNzc3OTcy","number":1262,"title":"Adding msr_genomics_kbcomp 
dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T16:01:30Z","updated_at":"2020-12-08T18:08:55Z","closed_at":"2020-12-08T18:08:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1262","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1262","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1262.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1262.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1262\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1262\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1261","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1261\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1261\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1261\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1261","id":758626112,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNzY4OTgy","number":1261,"title":"Add Google Sentence Compression 
dataset","user":{"login":"mattbui","id":46804938,"node_id":"MDQ6VXNlcjQ2ODA0OTM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46804938?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mattbui","html_url":"https:\/\/github.com\/mattbui","followers_url":"https:\/\/api.github.com\/users\/mattbui\/followers","following_url":"https:\/\/api.github.com\/users\/mattbui\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mattbui\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mattbui\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mattbui\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mattbui\/orgs","repos_url":"https:\/\/api.github.com\/users\/mattbui\/repos","events_url":"https:\/\/api.github.com\/users\/mattbui\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mattbui\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T15:47:43Z","updated_at":"2020-12-08T17:01:59Z","closed_at":"2020-12-08T17:01:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1261","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1261","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1261.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1261.patch","merged_at":"2020-12-08T17:01:59Z"},"body":"For more information: https:\/\/www.aclweb.org\/anthology\/D13-1155.pdf","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1261\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1261\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1260","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1260\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1260\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1260\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1260","id":758601828,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNzQ4ODM3","number":1260,"title":"Added NewsPH Raw 
Dataset","user":{"login":"jcblaisecruz02","id":24757547,"node_id":"MDQ6VXNlcjI0NzU3NTQ3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24757547?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jcblaisecruz02","html_url":"https:\/\/github.com\/jcblaisecruz02","followers_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/followers","following_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/orgs","repos_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/repos","events_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T15:17:53Z","updated_at":"2020-12-08T16:27:15Z","closed_at":"2020-12-08T16:27:15Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1260","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1260","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1260.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1260.patch","merged_at":null},"body":"Added the raw version of the NewsPH dataset, which was used to automatically generate the NewsPH-NLI corpus. Dataset of news articles in Filipino from mainstream Philippine news sites on the internet. 
Can be used as a language modeling dataset or to reproduce the NewsPH-NLI dataset.\r\n\r\nPaper: https:\/\/arxiv.org\/abs\/2010.11574\r\nRepo: https:\/\/github.com\/jcblaisecruz02\/Filipino-Text-Benchmarks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1260\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1260\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1259","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1259\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1259\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1259\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1259","id":758565320,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNzE4NjMz","number":1259,"title":"Add KorQPair dataset","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-07T14:33:57Z","updated_at":"2021-12-29T00:49:40Z","closed_at":"2020-12-08T15:11:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1259","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1259","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1259.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1259.patch","merged_at":"2020-12-08T15:11:41Z"},"body":"This PR adds a [Korean paired question dataset](https:\/\/github.com\/songys\/Question_pair) containing labels indicating whether two questions in a given pair are semantically identical. This dataset was used to evaluate the performance of [KoGPT2](https:\/\/github.com\/SKT-AI\/KoGPT2#subtask-evaluations) on a phrase detection downstream task. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1259\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1259\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1258","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1258\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1258\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1258\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1258","id":758557169,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNzExOTQz","number":1258,"title":"arXiv dataset added","user":{"login":"tanmoyio","id":33005287,"node_id":"MDQ6VXNlcjMzMDA1Mjg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33005287?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tanmoyio","html_url":"https:\/\/github.com\/tanmoyio","followers_url":"https:\/\/api.github.com\/users\/tanmoyio\/followers","following_url":"https:\/\/api.github.com\/users\/tanmoyio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tanmoyio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tanmoyio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tanmoyio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tanmoyio\/orgs","repos_url":"https:\/\/api.github.com\/users\/tanmoyio\/repos","events_url":"https:\/\/api.github.com\/users\/tanmoyio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tanmoyio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T14:23:33Z","updated_at":"2020-12-08T14:07:15Z","closed_at":"2020-12-08T14:07:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1258","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1258","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1258.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1258.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1258\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1258\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1257","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1257\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1257\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1257\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1257","id":758550490,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNzA2NDQy","number":1257,"title":"Add Swahili news classification 
dataset","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T14:15:13Z","updated_at":"2020-12-08T14:44:19Z","closed_at":"2020-12-08T14:44:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1257","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1257","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1257.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1257.patch","merged_at":"2020-12-08T14:44:19Z"},"body":"Add Swahili news classification dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1257\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1257\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1256","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1256\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1256\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1256\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1256","id":758531980,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNjkwMTQ2","number":1256,"title":"adding LiMiT 
dataset","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T14:00:41Z","updated_at":"2020-12-08T14:58:28Z","closed_at":"2020-12-08T14:42:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1256","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1256","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1256.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1256.patch","merged_at":"2020-12-08T14:42:51Z"},"body":"Adding LiMiT: The Literal Motion in Text Dataset\r\nhttps:\/\/github.com\/ilmgut\/limit_dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1256\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1256\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1255","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1255\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1255\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1255\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1255","id":758530243,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNjg4Njg2","number":1255,"title":"[doc] nlp\/viewer 
\u27a1\ufe0fdatasets\/viewer","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T13:58:41Z","updated_at":"2020-12-08T17:17:54Z","closed_at":"2020-12-08T17:17:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1255","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1255","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1255.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1255.patch","merged_at":"2020-12-08T17:17:53Z"},"body":"cc @srush","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1255\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1255\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1254","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1254\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1254\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1254\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1254","id":758518774,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNjc5MTYy","number":1254,"title":"Added 
WikiText-TL-39","user":{"login":"jcblaisecruz02","id":24757547,"node_id":"MDQ6VXNlcjI0NzU3NTQ3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24757547?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jcblaisecruz02","html_url":"https:\/\/github.com\/jcblaisecruz02","followers_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/followers","following_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/orgs","repos_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/repos","events_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jcblaisecruz02\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T13:43:48Z","updated_at":"2020-12-08T16:00:58Z","closed_at":"2020-12-08T16:00:58Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1254","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1254","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1254.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1254.patch","merged_at":null},"body":"This PR adds the WikiText-TL-39 Filipino Language Modeling dataset.\r\n\r\nPaper: https:\/\/arxiv.org\/abs\/1907.00409\r\nRepo: https:\/\/github.com\/jcblaisecruz02\/Filipino-Text-Benchmarks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1254\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1254\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1253","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1253\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1253\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1253\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1253","id":758517391,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNjc4MDE1","number":1253,"title":"add 
thainer","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T13:41:54Z","updated_at":"2020-12-08T14:44:49Z","closed_at":"2020-12-08T14:44:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1253","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1253","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1253.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1253.patch","merged_at":"2020-12-08T14:44:49Z"},"body":"ThaiNER (v1.3) is a 6,456-sentence named entity recognition dataset created from expanding the 2,258-sentence\r\n[unnamed dataset](http:\/\/pioneer.chula.ac.th\/~awirote\/Data-Nutcha.zip) by\r\n[Tirasaroj and Aroonmanakun (2012)](http:\/\/pioneer.chula.ac.th\/~awirote\/publications\/).\r\nIt is used to train NER taggers in [PyThaiNLP](https:\/\/github.com\/PyThaiNLP\/pythainlp).\r\nThe NER tags are annotated by [Tirasaroj and Aroonmanakun (2012)]((http:\/\/pioneer.chula.ac.th\/~awirote\/publications\/))\r\nfor 2,258 sentences and the rest by [@wannaphong](https:\/\/github.com\/wannaphong\/).\r\nThe POS tags are done by [PyThaiNLP](https:\/\/github.com\/PyThaiNLP\/pythainlp)'s `perceptron` engine trained on `orchid_ud`.\r\n[@wannaphong](https:\/\/github.com\/wannaphong\/) is now the only maintainer of this dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1253\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1253\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1252","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1252\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1252\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1252\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1252","id":758511388,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNjczMDcx","number":1252,"title":"Add Naver sentiment movie 
corpus","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T13:33:45Z","updated_at":"2020-12-08T14:32:33Z","closed_at":"2020-12-08T14:21:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1252","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1252","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1252.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1252.patch","merged_at":"2020-12-08T14:21:37Z"},"body":"Supersedes #1168 \r\n\r\n> This PR adds the [Naver sentiment movie corpus](https:\/\/github.com\/e9t\/nsmc), a dataset containing Korean movie reviews from Naver, the most commonly used search engine in Korea. This dataset is often used to benchmark models on Korean NLP tasks, as seen in [this paper](https:\/\/www.aclweb.org\/anthology\/2020.lrec-1.199.pdf). 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1252\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1252\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1251","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1251\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1251\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1251\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1251","id":758503689,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNjY2NTg2","number":1251,"title":"Add Wiki Atomic Edits Dataset (43M edits)","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T13:23:08Z","updated_at":"2020-12-14T10:05:01Z","closed_at":"2020-12-14T10:05:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1251","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1251","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1251.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1251.patch","merged_at":"2020-12-14T10:05:00Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1251\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1251\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1250","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1250\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1250\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1250\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1250","id":758491704,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNjU2NTI4","number":1250,"title":"added Nergrit dataset","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T13:06:12Z","updated_at":"2020-12-08T14:33:29Z","closed_at":"2020-12-08T14:33:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1250","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1250","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1250.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1250.patch","merged_at":"2020-12-08T14:33:29Z"},"body":"Nergrit Corpus is a dataset collection for Indonesian Named Entity Recognition, Statement Extraction, and Sentiment Analysis. 
This PR is only for the Named Entity Recognition.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1250\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1250\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1249","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1249\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1249\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1249\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1249","id":758472863,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNjQwNjA1","number":1249,"title":"Add doc2dial dataset","user":{"login":"KMFODA","id":35491698,"node_id":"MDQ6VXNlcjM1NDkxNjk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35491698?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KMFODA","html_url":"https:\/\/github.com\/KMFODA","followers_url":"https:\/\/api.github.com\/users\/KMFODA\/followers","following_url":"https:\/\/api.github.com\/users\/KMFODA\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KMFODA\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KMFODA\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KMFODA\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KMFODA\/orgs","repos_url":"https:\/\/api.github.com\/users\/KMFODA\/repos","events_url":"https:\/\/api.github.com\/users\/KMFODA\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KMFODA\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-07T12:39:09Z","updated_at":"2020-12-14T16:17:14Z","closed_at":"2020-12-14T16:17:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1249","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1249","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1249.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1249.patch","merged_at":"2020-12-14T16:17:14Z"},"body":"### Doc2dial: A Goal-Oriented Document-Grounded Dialogue Dataset v0.9\r\n\r\nOnce complete this will add the [Doc2dial](https:\/\/doc2dial.github.io\/data.html) dataset from the generic data sets list.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1249\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1249\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1248","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1248\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1248\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1248\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1248","id":758454438,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNjI0ODY5","number":1248,"title":"Update step-by-step guide about the dataset cards","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T12:12:12Z","updated_at":"2020-12-07T13:19:24Z","closed_at":"2020-12-07T13:19:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1248","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1248","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1248.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1248.patch","merged_at":"2020-12-07T13:19:23Z"},"body":"Small update in the step-by-step guide about the dataset cards to indicate it can be created and completing while exploring the dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1248\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1248\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1247","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1247\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1247\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1247\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1247","id":758431640,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNjA1NzE2","number":1247,"title":"Adding indonlu 
dataset","user":{"login":"yasirabd","id":6518504,"node_id":"MDQ6VXNlcjY1MTg1MDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6518504?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yasirabd","html_url":"https:\/\/github.com\/yasirabd","followers_url":"https:\/\/api.github.com\/users\/yasirabd\/followers","following_url":"https:\/\/api.github.com\/users\/yasirabd\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yasirabd\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yasirabd\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yasirabd\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yasirabd\/orgs","repos_url":"https:\/\/api.github.com\/users\/yasirabd\/repos","events_url":"https:\/\/api.github.com\/users\/yasirabd\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yasirabd\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-07T11:38:45Z","updated_at":"2020-12-08T14:11:50Z","closed_at":"2020-12-08T14:11:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1247","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1247","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1247.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1247.patch","merged_at":null},"body":"IndoNLU benchmark is a collection of resources for training, evaluating, and analyzing natural language understanding systems for Bahasa Indonesia. It contains 12 datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1247\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1247\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1246","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1246\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1246\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1246\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1246","id":758418652,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNTk0NjIz","number":1246,"title":"arXiv dataset 
added","user":{"login":"tanmoyio","id":33005287,"node_id":"MDQ6VXNlcjMzMDA1Mjg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33005287?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tanmoyio","html_url":"https:\/\/github.com\/tanmoyio","followers_url":"https:\/\/api.github.com\/users\/tanmoyio\/followers","following_url":"https:\/\/api.github.com\/users\/tanmoyio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tanmoyio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tanmoyio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tanmoyio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tanmoyio\/orgs","repos_url":"https:\/\/api.github.com\/users\/tanmoyio\/repos","events_url":"https:\/\/api.github.com\/users\/tanmoyio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tanmoyio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T11:20:23Z","updated_at":"2020-12-07T14:22:58Z","closed_at":"2020-12-07T14:22:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1246","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1246","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1246.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1246.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1246\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1246\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1245","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1245\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1245\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1245\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1245","id":758411233,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNTg4NDUw","number":1245,"title":"Add Google Turkish Treebank 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T11:09:17Z","updated_at":"2020-12-16T16:30:24Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1245","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1245","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1245.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1245.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1245\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1245\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1244","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1244\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1244\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1244\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1244","id":758384417,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNTY1ODMz","number":1244,"title":"arxiv dataset 
added","user":{"login":"tanmoyio","id":33005287,"node_id":"MDQ6VXNlcjMzMDA1Mjg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33005287?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tanmoyio","html_url":"https:\/\/github.com\/tanmoyio","followers_url":"https:\/\/api.github.com\/users\/tanmoyio\/followers","following_url":"https:\/\/api.github.com\/users\/tanmoyio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tanmoyio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tanmoyio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tanmoyio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tanmoyio\/orgs","repos_url":"https:\/\/api.github.com\/users\/tanmoyio\/repos","events_url":"https:\/\/api.github.com\/users\/tanmoyio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tanmoyio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T10:32:54Z","updated_at":"2020-12-07T11:04:23Z","closed_at":"2020-12-07T11:04:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1244","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1244","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1244.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1244.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1244\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1244\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1243","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1243\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1243\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1243\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1243","id":758378904,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNTYxNDAx","number":1243,"title":"Add Google Noun Verb 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T10:26:05Z","updated_at":"2020-12-22T12:47:16Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1243","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1243","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1243.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1243.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1243\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1243\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1242","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1242\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1242\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1242\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1242","id":758370579,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNTU0MzAx","number":1242,"title":"adding 
bprec","user":{"login":"kldarek","id":15803781,"node_id":"MDQ6VXNlcjE1ODAzNzgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15803781?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kldarek","html_url":"https:\/\/github.com\/kldarek","followers_url":"https:\/\/api.github.com\/users\/kldarek\/followers","following_url":"https:\/\/api.github.com\/users\/kldarek\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kldarek\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kldarek\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kldarek\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kldarek\/orgs","repos_url":"https:\/\/api.github.com\/users\/kldarek\/repos","events_url":"https:\/\/api.github.com\/users\/kldarek\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kldarek\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-07T10:15:49Z","updated_at":"2020-12-08T14:33:49Z","closed_at":"2020-12-08T14:33:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1242","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1242","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1242.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1242.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1242\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1242\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1241","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1241\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1241\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1241\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1241","id":758360643,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNTQ1OTQ0","number":1241,"title":"Opus elhuyar dataset for MT task having languages pair in Spanish to 
Basque","user":{"login":"spatil6","id":6419011,"node_id":"MDQ6VXNlcjY0MTkwMTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6419011?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/spatil6","html_url":"https:\/\/github.com\/spatil6","followers_url":"https:\/\/api.github.com\/users\/spatil6\/followers","following_url":"https:\/\/api.github.com\/users\/spatil6\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/spatil6\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/spatil6\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/spatil6\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/spatil6\/orgs","repos_url":"https:\/\/api.github.com\/users\/spatil6\/repos","events_url":"https:\/\/api.github.com\/users\/spatil6\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/spatil6\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T10:03:34Z","updated_at":"2020-12-19T14:55:12Z","closed_at":"2020-12-09T15:12:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1241","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1241","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1241.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1241.patch","merged_at":"2020-12-09T15:12:48Z"},"body":"Opus elhuyar dataset for MT task having languages pair in Spanish to Basque\r\nMore info : http:\/\/opus.nlpl.eu\/Elhuyar.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1241\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1241\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1240","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1240\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1240\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1240\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1240","id":758355523,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNTQxNjk5","number":1240,"title":"Multi Domain Sentiment Analysis Dataset 
(MDSA)","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-12-07T09:57:15Z","updated_at":"2020-12-16T16:26:23Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1240","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1240","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1240.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1240.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1240\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1240\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1239","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1239\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1239\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1239\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1239","id":758339593,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNTI4NTU5","number":1239,"title":"add yelp_review_full 
dataset","user":{"login":"hfawaz","id":29229602,"node_id":"MDQ6VXNlcjI5MjI5NjAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29229602?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hfawaz","html_url":"https:\/\/github.com\/hfawaz","followers_url":"https:\/\/api.github.com\/users\/hfawaz\/followers","following_url":"https:\/\/api.github.com\/users\/hfawaz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hfawaz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hfawaz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hfawaz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hfawaz\/orgs","repos_url":"https:\/\/api.github.com\/users\/hfawaz\/repos","events_url":"https:\/\/api.github.com\/users\/hfawaz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hfawaz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T09:35:36Z","updated_at":"2020-12-08T15:43:24Z","closed_at":"2020-12-08T15:00:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1239","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1239","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1239.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1239.patch","merged_at":null},"body":"This corresponds to the Yelp-5 requested in https:\/\/github.com\/huggingface\/datasets\/issues\/353 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1239\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1239\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1238","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1238\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1238\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1238\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1238","id":758321688,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNTEzODUw","number":1238,"title":"adding 
poem_sentiment","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T09:11:52Z","updated_at":"2020-12-09T16:36:10Z","closed_at":"2020-12-09T16:02:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1238","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1238","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1238.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1238.patch","merged_at":"2020-12-09T16:02:45Z"},"body":"Adding poem_sentiment dataset.\r\nhttps:\/\/github.com\/google-research-datasets\/poem-sentiment","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1238\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1238\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1237","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1237\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1237\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1237\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1237","id":758318353,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNTExMDky","number":1237,"title":"Add AmbigQA 
dataset","user":{"login":"cceyda","id":15624271,"node_id":"MDQ6VXNlcjE1NjI0Mjcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15624271?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cceyda","html_url":"https:\/\/github.com\/cceyda","followers_url":"https:\/\/api.github.com\/users\/cceyda\/followers","following_url":"https:\/\/api.github.com\/users\/cceyda\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cceyda\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cceyda\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cceyda\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cceyda\/orgs","repos_url":"https:\/\/api.github.com\/users\/cceyda\/repos","events_url":"https:\/\/api.github.com\/users\/cceyda\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cceyda\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T09:07:19Z","updated_at":"2020-12-08T13:38:52Z","closed_at":"2020-12-08T13:38:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1237","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1237","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1237.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1237.patch","merged_at":"2020-12-08T13:38:52Z"},"body":"# AmbigQA: Answering Ambiguous Open-domain Questions Dataset\r\nAdding the [AmbigQA](https:\/\/nlp.cs.washington.edu\/ambigqa\/) dataset as part of the sprint \ud83c\udf89 (from Open dataset list for Dataset sprint)\r\n\r\nAdded both the light and full versions (as seen on the dataset homepage)\r\nThe json format changes based on the value of one 'type' field, so I set the unavailable field to an empty list. 
This is explained in the README -> Data Fields\r\n\r\n```py\r\ntrain_light_dataset = load_dataset('.\/datasets\/ambig_qa',\"light\",split=\"train\")\r\nval_light_dataset = load_dataset('.\/datasets\/ambig_qa',\"light\",split=\"validation\")\r\ntrain_full_dataset = load_dataset('.\/datasets\/ambig_qa',\"full\",split=\"train\")\r\nval_full_dataset = load_dataset('.\/datasets\/ambig_qa',\"full\",split=\"validation\")\r\n\r\n\r\nfor example in train_light_dataset:\r\n for i,t in enumerate(example['annotations']['type']):\r\n if t =='singleAnswer':\r\n # use the example['annotations']['answer'][i]\r\n # example['annotations']['qaPairs'][i] - > is []\r\n print(example['annotations']['answer'][i])\r\n else:\r\n # use the example['annotations']['qaPairs'][i]\r\n # example['annotations']['answer'][i] - > is []\r\n print(example['annotations']['qaPairs'][i])\r\n\r\n```\r\n\r\n- [x] All tests passed\r\n- [x] Added dummy data\r\n- [x] Added data card (as much as I could)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1237\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1237\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1236","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1236\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1236\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1236\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1236","id":758263012,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNDYzOTg2","number":1236,"title":"Opus finlex dataset of language pair Finnish and 
Swedish","user":{"login":"spatil6","id":6419011,"node_id":"MDQ6VXNlcjY0MTkwMTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6419011?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/spatil6","html_url":"https:\/\/github.com\/spatil6","followers_url":"https:\/\/api.github.com\/users\/spatil6\/followers","following_url":"https:\/\/api.github.com\/users\/spatil6\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/spatil6\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/spatil6\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/spatil6\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/spatil6\/orgs","repos_url":"https:\/\/api.github.com\/users\/spatil6\/repos","events_url":"https:\/\/api.github.com\/users\/spatil6\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/spatil6\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T07:53:57Z","updated_at":"2020-12-08T13:30:33Z","closed_at":"2020-12-08T13:30:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1236","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1236","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1236.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1236.patch","merged_at":"2020-12-08T13:30:33Z"},"body":"Added Opus_finlex dataset of language pair Finnish and Swedish\r\nMore info : http:\/\/opus.nlpl.eu\/Finlex.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1236\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1236\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1235","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1235\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1235\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1235\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1235","id":758234511,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNDM5NDk4","number":1235,"title":"Wino 
bias","user":{"login":"akshayb7","id":29649801,"node_id":"MDQ6VXNlcjI5NjQ5ODAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29649801?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/akshayb7","html_url":"https:\/\/github.com\/akshayb7","followers_url":"https:\/\/api.github.com\/users\/akshayb7\/followers","following_url":"https:\/\/api.github.com\/users\/akshayb7\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/akshayb7\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/akshayb7\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/akshayb7\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/akshayb7\/orgs","repos_url":"https:\/\/api.github.com\/users\/akshayb7\/repos","events_url":"https:\/\/api.github.com\/users\/akshayb7\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/akshayb7\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T07:12:42Z","updated_at":"2020-12-10T20:48:12Z","closed_at":"2020-12-10T20:48:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1235","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1235","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1235.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1235.patch","merged_at":null},"body":"The PR will fail circleCi tests because of the requirement of manual loading of data. Fresh PR because of messed up history of the previous one. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1235\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1235\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1234","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1234\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1234\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1234\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1234","id":758229304,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzNDM0ODkz","number":1234,"title":"Added ade_corpus_v2, with 3 configs for relation extraction and classification 
task","user":{"login":"Nilanshrajput","id":28673745,"node_id":"MDQ6VXNlcjI4NjczNzQ1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28673745?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Nilanshrajput","html_url":"https:\/\/github.com\/Nilanshrajput","followers_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/followers","following_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/orgs","repos_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/repos","events_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-07T07:05:14Z","updated_at":"2020-12-14T17:49:14Z","closed_at":"2020-12-14T17:49:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1234","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1234","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1234.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1234.patch","merged_at":"2020-12-14T17:49:14Z"},"body":"Adverse Drug Reaction Data: ADE-Corpus-V2 dataset added configs for different tasks with given data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1234\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1234\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1233","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1233\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1233\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1233\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1233","id":758188699,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMzk5NTY3","number":1233,"title":"Add Curiosity Dialogs 
Dataset","user":{"login":"vineeths96","id":50873201,"node_id":"MDQ6VXNlcjUwODczMjAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50873201?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vineeths96","html_url":"https:\/\/github.com\/vineeths96","followers_url":"https:\/\/api.github.com\/users\/vineeths96\/followers","following_url":"https:\/\/api.github.com\/users\/vineeths96\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vineeths96\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vineeths96\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vineeths96\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vineeths96\/orgs","repos_url":"https:\/\/api.github.com\/users\/vineeths96\/repos","events_url":"https:\/\/api.github.com\/users\/vineeths96\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vineeths96\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-07T06:01:00Z","updated_at":"2020-12-20T13:34:09Z","closed_at":"2020-12-09T14:50:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1233","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1233","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1233.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1233.patch","merged_at":"2020-12-09T14:50:29Z"},"body":"Add Facebook [Curiosity Dialogs](https:\/\/github.com\/facebookresearch\/curiosity) Dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1233\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1233\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1232","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1232\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1232\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1232\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1232","id":758180669,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMzkyNTc0","number":1232,"title":"Add Grail QA 
dataset","user":{"login":"mattbui","id":46804938,"node_id":"MDQ6VXNlcjQ2ODA0OTM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46804938?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mattbui","html_url":"https:\/\/github.com\/mattbui","followers_url":"https:\/\/api.github.com\/users\/mattbui\/followers","following_url":"https:\/\/api.github.com\/users\/mattbui\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mattbui\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mattbui\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mattbui\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mattbui\/orgs","repos_url":"https:\/\/api.github.com\/users\/mattbui\/repos","events_url":"https:\/\/api.github.com\/users\/mattbui\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mattbui\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T05:46:45Z","updated_at":"2020-12-08T13:03:19Z","closed_at":"2020-12-08T13:03:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1232","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1232","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1232.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1232.patch","merged_at":"2020-12-08T13:03:19Z"},"body":"For more information: https:\/\/dki-lab.github.io\/GrailQA\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1232\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1232\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1231","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1231\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1231\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1231\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1231","id":758121398,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMzQzMzAz","number":1231,"title":"Add Urdu Sentiment Corpus 
(USC)","user":{"login":"chaitnayabasava","id":44389205,"node_id":"MDQ6VXNlcjQ0Mzg5MjA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44389205?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chaitnayabasava","html_url":"https:\/\/github.com\/chaitnayabasava","followers_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/followers","following_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/orgs","repos_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/repos","events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-07T03:25:20Z","updated_at":"2020-12-07T18:05:16Z","closed_at":"2020-12-07T16:43:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1231","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1231","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1231.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1231.patch","merged_at":"2020-12-07T16:43:23Z"},"body":"@lhoestq opened a clean PR containing only relevant files.\r\n\r\nold PR #1140","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1231\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1231\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1230","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1230\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1230\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1230\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1230","id":758119342,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMzQxNTg0","number":1230,"title":"Add Urdu fake news 
dataset","user":{"login":"chaitnayabasava","id":44389205,"node_id":"MDQ6VXNlcjQ0Mzg5MjA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44389205?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chaitnayabasava","html_url":"https:\/\/github.com\/chaitnayabasava","followers_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/followers","following_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/orgs","repos_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/repos","events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-07T03:19:50Z","updated_at":"2020-12-07T18:04:55Z","closed_at":"2020-12-07T16:57:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1230","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1230","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1230.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1230.patch","merged_at":"2020-12-07T16:57:54Z"},"body":"@lhoestq opened a clean PR containing only relevant files.\r\n\r\nold PR #1125 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1230\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1230\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1229","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1229\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1229\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1229\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1229","id":758100707,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMzI2OTgw","number":1229,"title":"Muchocine - Spanish movie reviews 
dataset","user":{"login":"mapmeld","id":643918,"node_id":"MDQ6VXNlcjY0MzkxOA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/643918?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mapmeld","html_url":"https:\/\/github.com\/mapmeld","followers_url":"https:\/\/api.github.com\/users\/mapmeld\/followers","following_url":"https:\/\/api.github.com\/users\/mapmeld\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mapmeld\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mapmeld\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mapmeld\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mapmeld\/orgs","repos_url":"https:\/\/api.github.com\/users\/mapmeld\/repos","events_url":"https:\/\/api.github.com\/users\/mapmeld\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mapmeld\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-07T02:23:29Z","updated_at":"2020-12-21T10:09:09Z","closed_at":"2020-12-21T10:09:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1229","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1229","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1229.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1229.patch","merged_at":"2020-12-21T10:09:09Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1229\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1229\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1228","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1228\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1228\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1228\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1228","id":758049068,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjg1ODI2","number":1228,"title":"add opus_100 
dataset","user":{"login":"vasudevgupta7","id":53136577,"node_id":"MDQ6VXNlcjUzMTM2NTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53136577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vasudevgupta7","html_url":"https:\/\/github.com\/vasudevgupta7","followers_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/followers","following_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/orgs","repos_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/repos","events_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-06T23:17:24Z","updated_at":"2020-12-09T14:54:00Z","closed_at":"2020-12-09T14:54:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1228","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1228","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1228.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1228.patch","merged_at":"2020-12-09T14:53:59Z"},"body":"This PR will add [opus100 dataset](http:\/\/opus.nlpl.eu\/opus-100.php).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1228\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1228\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1227","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1227\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1227\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1227\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1227","id":758049060,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjg1ODIx","number":1227,"title":"readme: remove link to Google's responsible AI 
practices","user":{"login":"stefan-it","id":20651387,"node_id":"MDQ6VXNlcjIwNjUxMzg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20651387?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stefan-it","html_url":"https:\/\/github.com\/stefan-it","followers_url":"https:\/\/api.github.com\/users\/stefan-it\/followers","following_url":"https:\/\/api.github.com\/users\/stefan-it\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stefan-it\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stefan-it\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stefan-it\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stefan-it\/orgs","repos_url":"https:\/\/api.github.com\/users\/stefan-it\/repos","events_url":"https:\/\/api.github.com\/users\/stefan-it\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stefan-it\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T23:17:22Z","updated_at":"2020-12-07T08:35:19Z","closed_at":"2020-12-06T23:20:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1227","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1227","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1227.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1227.patch","merged_at":"2020-12-06T23:20:41Z"},"body":"...maybe we'll find a company that reallly stands behind responsible AI practices ;)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1227\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1227\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1226","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1226\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1226\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1226\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1226","id":758036979,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjc2OTU3","number":1226,"title":"Add menyo_20k_mt 
dataset","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-06T22:16:15Z","updated_at":"2020-12-10T19:22:14Z","closed_at":"2020-12-10T19:22:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1226","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1226","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1226.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1226.patch","merged_at":null},"body":"Add menyo_20k_mt dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1226\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1226\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1225","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1225\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1225\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1225\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1225","id":758035501,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjc1ODcx","number":1225,"title":"Add Winobias 
dataset","user":{"login":"akshayb7","id":29649801,"node_id":"MDQ6VXNlcjI5NjQ5ODAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29649801?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/akshayb7","html_url":"https:\/\/github.com\/akshayb7","followers_url":"https:\/\/api.github.com\/users\/akshayb7\/followers","following_url":"https:\/\/api.github.com\/users\/akshayb7\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/akshayb7\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/akshayb7\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/akshayb7\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/akshayb7\/orgs","repos_url":"https:\/\/api.github.com\/users\/akshayb7\/repos","events_url":"https:\/\/api.github.com\/users\/akshayb7\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/akshayb7\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-06T22:08:20Z","updated_at":"2020-12-07T06:45:59Z","closed_at":"2020-12-07T06:40:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1225","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1225","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1225.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1225.patch","merged_at":null},"body":"Pardon me for different commits with same message. There were conflicts after I rebased master while simultaneously pushing my changes to local repo, hence the duplicate entries.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1225\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1225\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1224","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1224\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1224\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1224\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1224","id":758022998,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjY2Njg1","number":1224,"title":"adding 
conceptnet5","user":{"login":"ontocord","id":8900094,"node_id":"MDQ6VXNlcjg5MDAwOTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8900094?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ontocord","html_url":"https:\/\/github.com\/ontocord","followers_url":"https:\/\/api.github.com\/users\/ontocord\/followers","following_url":"https:\/\/api.github.com\/users\/ontocord\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ontocord\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ontocord\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ontocord\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ontocord\/orgs","repos_url":"https:\/\/api.github.com\/users\/ontocord\/repos","events_url":"https:\/\/api.github.com\/users\/ontocord\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ontocord\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-12-06T21:06:53Z","updated_at":"2020-12-09T16:38:16Z","closed_at":"2020-12-09T14:37:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1224","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1224","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1224.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1224.patch","merged_at":"2020-12-09T14:37:17Z"},"body":"Adding the conceptnet5 and omcs txt files used to create the conceptnet5 dataset. Conceptne5 is a common sense dataset. More info can be found here: https:\/\/github.com\/commonsense\/conceptnet5\/wiki","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1224\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1224\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1223","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1223\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1223\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1223\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1223","id":758022208,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjY2MDc4","number":1223,"title":"\ud83c\uddf8\ud83c\uddea Added Swedish Reviews dataset for sentiment classification in 
Sw\u2026","user":{"login":"timpal0l","id":6556710,"node_id":"MDQ6VXNlcjY1NTY3MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6556710?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timpal0l","html_url":"https:\/\/github.com\/timpal0l","followers_url":"https:\/\/api.github.com\/users\/timpal0l\/followers","following_url":"https:\/\/api.github.com\/users\/timpal0l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timpal0l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timpal0l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timpal0l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timpal0l\/orgs","repos_url":"https:\/\/api.github.com\/users\/timpal0l\/repos","events_url":"https:\/\/api.github.com\/users\/timpal0l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timpal0l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T21:02:54Z","updated_at":"2020-12-08T10:54:56Z","closed_at":"2020-12-08T10:54:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1223","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1223","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1223.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1223.patch","merged_at":"2020-12-08T10:54:56Z"},"body":"perhaps: @lhoestq \ud83e\udd17 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1223\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1223\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1222","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1222\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1222\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1222\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1222","id":758018953,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjYzODIx","number":1222,"title":"Add numeric fused head 
dataset","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-06T20:46:53Z","updated_at":"2020-12-08T11:17:56Z","closed_at":"2020-12-08T11:17:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1222","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1222","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1222.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1222.patch","merged_at":"2020-12-08T11:17:55Z"},"body":"Adding the [NFH: Numeric Fused Head](https:\/\/nlp.biu.ac.il\/~lazary\/fh\/) dataset.\r\n\r\nEverything looks sensible and I've included both the identification and resolution tasks. 
I haven't personally used this dataset in my research so am unable to specify what the default configuration \/ supervised keys should be.\r\n\r\nI've filled out the basic info on the model card to the best of my knowledge but it's a little tricky to understand exactly what the fields represent.\r\n\r\nDataset author: @yanaiela ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1222\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1222\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1221","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1221\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1221\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1221\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1221","id":758016032,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjYxNjkw","number":1221,"title":"Add HKCanCor","user":{"login":"j-chim","id":22435209,"node_id":"MDQ6VXNlcjIyNDM1MjA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22435209?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/j-chim","html_url":"https:\/\/github.com\/j-chim","followers_url":"https:\/\/api.github.com\/users\/j-chim\/followers","following_url":"https:\/\/api.github.com\/users\/j-chim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/j-chim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/j-chim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/j-chim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/j-chim\/orgs","repos_url":"https:\/\/api.github.com\/users\/j-chim\/repos","events_url":"https:\/\/api.github.com\/users\/j-chim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/j-chim\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T20:32:07Z","updated_at":"2020-12-09T16:34:18Z","closed_at":"2020-12-09T16:34:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1221","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1221","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1221.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1221.patch","merged_at":"2020-12-09T16:34:18Z"},"body":"This PR adds the [Hong Kong Cantonese Corpus](http:\/\/compling.hss.ntu.edu.sg\/hkcancor\/), by [Luke and Wong 2015](http:\/\/compling.hss.ntu.edu.sg\/hkcancor\/data\/LukeWong_Hong-Kong-Cantonese-Corpus.pdf). \r\n\r\nThe dummy data included here was manually created, as the original dataset uses a xml-like format (see a copy hosted [here](https:\/\/github.com\/fcbond\/hkcancor\/blob\/master\/sample\/d1_v.txt) for example) that requires a few processing steps. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1221\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1221\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1220","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1220\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1220\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1220\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1220","id":758015894,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjYxNTgw","number":1220,"title":"add Korean HateSpeech dataset","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-06T20:31:29Z","updated_at":"2020-12-08T15:21:09Z","closed_at":"2020-12-08T11:05:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1220","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1220","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1220.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1220.patch","merged_at":"2020-12-08T11:05:42Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1220\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1220\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1219","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1219\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1219\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1219\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1219","id":758013368,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjU5NzMw","number":1219,"title":"Add Korean NER 
dataset","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T20:19:06Z","updated_at":"2021-12-29T00:50:59Z","closed_at":"2020-12-08T10:25:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1219","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1219","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1219.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1219.patch","merged_at":"2020-12-08T10:25:33Z"},"body":"Supersedes #1177 \r\n\r\n> This PR adds the [Korean named entity recognition dataset](https:\/\/github.com\/kmounlp\/NER). This dataset has been used in many downstream tasks, such as training [KoBERT](https:\/\/github.com\/SKTBrain\/KoBERT) for NER, as seen in this [KoBERT-CRF implementation](https:\/\/github.com\/eagle705\/pytorch-bert-crf-ner).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1219\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1219\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1218","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1218\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1218\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1218\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1218","id":758009113,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjU2NzIz","number":1218,"title":"Add WMT20 MLQE 3 shared 
tasks","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-06T19:59:12Z","updated_at":"2020-12-15T15:27:30Z","closed_at":"2020-12-15T15:27:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1218","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1218","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1218.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1218.patch","merged_at":"2020-12-15T15:27:29Z"},"body":"3 tasks for the WMT 20 MLQE shared tasks -> 3 different datasets\r\n\r\n(I re-created #1137 because it was too messy).\r\n\r\nNote that in L199 `task3.py`, I used `logging.warning` to print some missing data in the train set.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1218\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1218\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1217","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1217\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1217\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1217\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1217","id":758008321,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjU2MjU4","number":1217,"title":"adding DataCommons fact 
checking","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T19:56:12Z","updated_at":"2020-12-16T16:22:48Z","closed_at":"2020-12-16T16:22:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1217","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1217","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1217.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1217.patch","merged_at":"2020-12-16T16:22:48Z"},"body":"Adding the data from: https:\/\/datacommons.org\/factcheck\/\r\n\r\nHad to cheat a bit with the dummy data as the test doesn't recognize `.txt.gz`: had to rename uncompressed files with the `.gz` extension manually without actually compressing","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1217\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1217\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1216","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1216\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1216\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1216\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1216","id":758005982,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjU0ODE2","number":1216,"title":"Add 
limit","user":{"login":"j-chim","id":22435209,"node_id":"MDQ6VXNlcjIyNDM1MjA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22435209?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/j-chim","html_url":"https:\/\/github.com\/j-chim","followers_url":"https:\/\/api.github.com\/users\/j-chim\/followers","following_url":"https:\/\/api.github.com\/users\/j-chim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/j-chim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/j-chim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/j-chim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/j-chim\/orgs","repos_url":"https:\/\/api.github.com\/users\/j-chim\/repos","events_url":"https:\/\/api.github.com\/users\/j-chim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/j-chim\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-06T19:46:18Z","updated_at":"2020-12-08T07:52:11Z","closed_at":"2020-12-08T07:52:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1216","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1216","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1216.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1216.patch","merged_at":null},"body":"This PR adds [LiMiT](https:\/\/github.com\/ilmgut\/limit_dataset), a dataset for literal motion classification\/extraction by [Manotas et al., 2020](https:\/\/www.aclweb.org\/anthology\/2020.findings-emnlp.88.pdf).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1216\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1216\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1215","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1215\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1215\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1215\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1215","id":758002885,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjUyNjUx","number":1215,"title":"Add irc 
disentanglement","user":{"login":"dhruvjoshi1998","id":32560035,"node_id":"MDQ6VXNlcjMyNTYwMDM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32560035?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dhruvjoshi1998","html_url":"https:\/\/github.com\/dhruvjoshi1998","followers_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/followers","following_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/orgs","repos_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/repos","events_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dhruvjoshi1998\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-06T19:30:46Z","updated_at":"2020-12-16T16:18:25Z","closed_at":"2020-12-16T16:18:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1215","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1215","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1215.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1215.patch","merged_at":null},"body":"added files for irc disentanglement dataset\r\nwas unable to test dummy data as a result of vpn\/proxy issues","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1215\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1215\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1214","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1214\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1214\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1214\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1214","id":758002786,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjUyNTgx","number":1214,"title":"adding medical-questions-pairs 
dataset","user":{"login":"tuner007","id":46425391,"node_id":"MDQ6VXNlcjQ2NDI1Mzkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46425391?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tuner007","html_url":"https:\/\/github.com\/tuner007","followers_url":"https:\/\/api.github.com\/users\/tuner007\/followers","following_url":"https:\/\/api.github.com\/users\/tuner007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tuner007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tuner007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tuner007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tuner007\/orgs","repos_url":"https:\/\/api.github.com\/users\/tuner007\/repos","events_url":"https:\/\/api.github.com\/users\/tuner007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tuner007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T19:30:12Z","updated_at":"2020-12-09T14:42:53Z","closed_at":"2020-12-09T14:42:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1214","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1214","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1214.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1214.patch","merged_at":"2020-12-09T14:42:53Z"},"body":"This dataset consists of 3048 similar and dissimilar medical question pairs hand-generated and labeled by Curai's doctors.\r\nDataset : https:\/\/github.com\/curai\/medical-question-pair-dataset\r\nPaper : https:\/\/drive.google.com\/file\/d\/1CHPGBXkvZuZc8hpr46HeHU6U6jnVze-s\/view","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1214\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1214\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1213","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1213\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1213\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1213\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1213","id":757983884,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjM4NzEz","number":1213,"title":"add 
taskmaster3","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-06T17:56:03Z","updated_at":"2020-12-09T11:05:10Z","closed_at":"2020-12-09T11:00:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1213","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1213","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1213.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1213.patch","merged_at":"2020-12-09T11:00:29Z"},"body":"Adding Taskmaster-3 dataset\r\nhttps:\/\/github.com\/google-research-datasets\/Taskmaster\/tree\/master\/TM-3-2020.\r\n\r\nThe dataset structure almost same as original dataset with these two changes\r\n\r\n1. In original dataset, each `apis` has a `args` filed which is a `dict` with variable keys, which represent the name and value of the args. Here converted that to a `list` of `dict` with keys `arg_name` and `arg_value`. For ex.\r\n\r\n```python\r\nargs = {\"name.movie\": \"Mulan\", \"name.theater\": \": \"Mountain AMC 16\"}\r\n```\r\nbecomes \r\n```python\r\n[\r\n {\r\n \"arg_name\": \"name.movie\",\r\n \"arg_value\": \"Mulan\"\r\n },\r\n {\r\n \"arg_name\": \"name.theater\",\r\n \"arg_value\": \"Mountain AMC 16\"\r\n }\r\n]\r\n```\r\n\r\n2. Each `apis` has a `response` which is also a `dict` with variable keys representing response name\/type and it's value. 
As above converted it to `list` of `dict` with keys `response_name` and `response_value`.\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1213\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1213\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1212","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1212\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1212\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1212\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1212","id":757978795,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjM1MTky","number":1212,"title":"Add Sanskrit Classic texts in datasets","user":{"login":"parmarsuraj99","id":9317265,"node_id":"MDQ6VXNlcjkzMTcyNjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9317265?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/parmarsuraj99","html_url":"https:\/\/github.com\/parmarsuraj99","followers_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/followers","following_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/orgs","repos_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/repos","events_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-06T17:31:31Z","updated_at":"2020-12-07T19:04:08Z","closed_at":"2020-12-07T19:04:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1212","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1212","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1212.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1212.patch","merged_at":"2020-12-07T19:04:08Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1212\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1212\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1211","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1211\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1211\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1211\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1211","id":757973719,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjMxNDY3","number":1211,"title":"Add large spanish corpus","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T17:06:50Z","updated_at":"2020-12-09T13:36:36Z","closed_at":"2020-12-09T13:36:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1211","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1211","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1211.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1211.patch","merged_at":"2020-12-09T13:36:36Z"},"body":"Adds a collection of Spanish corpora that can be useful for pretraining language models. \r\n\r\nFollowing a nice suggestion from @yjernite we provide the user with three main ways to preprocess \/ load either \r\n\r\n* the whole corpus (17GB!)\r\n* one specific sub-corpus\r\n* the whole corpus, but return a single split. 
this is useful if you want to cache the whole preprocessing step once and interact with individual sub-corpora\r\n\r\nSee the dataset card for more details.\r\n\r\nReady for review!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1211\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1211\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1210","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1210\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1210\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1210\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1210","id":757966959,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjI2NDQ2","number":1210,"title":"Add XSUM Hallucination Annotations Dataset","user":{"login":"vineeths96","id":50873201,"node_id":"MDQ6VXNlcjUwODczMjAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50873201?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vineeths96","html_url":"https:\/\/github.com\/vineeths96","followers_url":"https:\/\/api.github.com\/users\/vineeths96\/followers","following_url":"https:\/\/api.github.com\/users\/vineeths96\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vineeths96\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vineeths96\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vineeths96\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vineeths96\/orgs","repos_url":"https:\/\/api.github.com\/users\/vineeths96\/repos","events_url":"https:\/\/api.github.com\/users\/vineeths96\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vineeths96\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-06T16:40:19Z","updated_at":"2020-12-20T13:34:56Z","closed_at":"2020-12-16T16:57:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1210","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1210","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1210.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1210.patch","merged_at":"2020-12-16T16:57:11Z"},"body":"Adding Google [XSum Hallucination Annotations](https:\/\/github.com\/google-research-datasets\/xsum_hallucination_annotations) dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1210\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1210\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1209","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1209\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1209\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1209\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1209","id":757965934,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjI1NzMw","number":1209,"title":"[AfriBooms] Dataset exists already","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-06T16:35:13Z","updated_at":"2020-12-07T16:52:24Z","closed_at":"2020-12-07T16:52:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1209","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1209","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1209.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1209.patch","merged_at":"2020-12-07T16:52:23Z"},"body":"When trying to add \"AfriBooms\": https:\/\/docs.google.com\/spreadsheets\/d\/12ShVow0M6RavnzbBEabm5j5dv12zBaf0y-niwEPPlo4\/edit#gid=1386399609 I noticed that the dataset exists already as a config of Universal Dependencies (universal_dependencies.py). 
I checked and the data exactly matches so that the new data link does not give any new data.\r\n\r\nThis PR improves the config's description a bit by linking to the paper.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1209\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1209\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1208","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1208\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1208\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1208\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1208","id":757961368,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjIyMzQ4","number":1208,"title":"Add HKCanCor","user":{"login":"j-chim","id":22435209,"node_id":"MDQ6VXNlcjIyNDM1MjA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22435209?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/j-chim","html_url":"https:\/\/github.com\/j-chim","followers_url":"https:\/\/api.github.com\/users\/j-chim\/followers","following_url":"https:\/\/api.github.com\/users\/j-chim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/j-chim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/j-chim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/j-chim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/j-chim\/orgs","repos_url":"https:\/\/api.github.com\/users\/j-chim\/repos","events_url":"https:\/\/api.github.com\/users\/j-chim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/j-chim\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T16:14:43Z","updated_at":"2020-12-06T20:23:17Z","closed_at":"2020-12-06T20:21:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1208","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1208","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1208.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1208.patch","merged_at":null},"body":"(Apologies, didn't manage the branches properly and the PR got too messy. 
Going to open a new PR with everything in order)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1208\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1208\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1207","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1207\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1207\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1207\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1207","id":757953830,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjE3MDA4","number":1207,"title":"Add msr_genomics_kbcomp Dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T15:40:05Z","updated_at":"2020-12-07T15:55:17Z","closed_at":"2020-12-07T15:55:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1207","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1207","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1207.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1207.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1207\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1207\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1206","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1206\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1206\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1206\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1206","id":757952992,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjE2NDYw","number":1206,"title":"Adding Enriched WebNLG 
dataset","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-06T15:36:20Z","updated_at":"2020-12-09T09:40:32Z","closed_at":"2020-12-09T09:40:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1206","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1206","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1206.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1206.patch","merged_at":null},"body":"This pull requests adds the `en` and `de` versions of the [Enriched WebNLG](https:\/\/github.com\/ThiagoCF05\/webnlg) dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1206\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1206\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1205","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1205\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1205\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1205\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1205","id":757942403,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjA4NDI1","number":1205,"title":"add lst20 with manual 
download","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-06T14:49:10Z","updated_at":"2020-12-09T16:33:10Z","closed_at":"2020-12-09T16:33:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1205","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1205","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1205.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1205.patch","merged_at":"2020-12-09T16:33:10Z"},"body":"passed on local:\r\n```\r\nRUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_dataset_all_configs_lst20\r\n```\r\nNot sure how to test:\r\n```\r\nRUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_real_dataset_lst20\r\n```\r\n\r\n```\r\nLST20 Corpus is a dataset for Thai language processing developed by National Electronics and Computer Technology Center (NECTEC), Thailand.\r\nIt offers five layers of linguistic annotation: word boundaries, POS tagging, named entities, clause boundaries, and sentence boundaries.\r\nAt a large scale, it consists of 3,164,002 words, 288,020 named entities, 248,181 clauses, and 74,180 sentences, while it is annotated with\r\n16 distinct POS tags. All 3,745 documents are also annotated with one of 15 news genres. 
Regarding its sheer size, this dataset is\r\nconsidered large enough for developing joint neural models for NLP.\r\nManually download at https:\/\/aiforthai.in.th\/corpus.php\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1205\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1205\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1204","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1204\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1204\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1204\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1204","id":757939475,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjA2MzE3","number":1204,"title":"adding meta_woz dataset","user":{"login":"pacman100","id":13534540,"node_id":"MDQ6VXNlcjEzNTM0NTQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13534540?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pacman100","html_url":"https:\/\/github.com\/pacman100","followers_url":"https:\/\/api.github.com\/users\/pacman100\/followers","following_url":"https:\/\/api.github.com\/users\/pacman100\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pacman100\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pacman100\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pacman100\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pacman100\/orgs","repos_url":"https:\/\/api.github.com\/users\/pacman100\/repos","events_url":"https:\/\/api.github.com\/users\/pacman100\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pacman100\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T14:34:13Z","updated_at":"2020-12-16T15:05:25Z","closed_at":"2020-12-16T15:05:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1204","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1204","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1204.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1204.patch","merged_at":"2020-12-16T15:05:24Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1204\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1204\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1203","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1203\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1203\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1203\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1203","id":757935170,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjAzMTc0","number":1203,"title":"Add Neural Code Search Dataset","user":{"login":"vinaykudari","id":34424769,"node_id":"MDQ6VXNlcjM0NDI0NzY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34424769?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vinaykudari","html_url":"https:\/\/github.com\/vinaykudari","followers_url":"https:\/\/api.github.com\/users\/vinaykudari\/followers","following_url":"https:\/\/api.github.com\/users\/vinaykudari\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vinaykudari\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vinaykudari\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vinaykudari\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vinaykudari\/orgs","repos_url":"https:\/\/api.github.com\/users\/vinaykudari\/repos","events_url":"https:\/\/api.github.com\/users\/vinaykudari\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vinaykudari\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-06T14:12:39Z","updated_at":"2020-12-09T16:40:15Z","closed_at":"2020-12-09T16:40:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1203","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1203","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1203.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1203.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1203\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1203\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1202","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1202\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1202\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1202\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1202","id":757934408,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMjAyNjE0","number":1202,"title":"Medical question 
pairs","user":{"login":"tuner007","id":46425391,"node_id":"MDQ6VXNlcjQ2NDI1Mzkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46425391?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tuner007","html_url":"https:\/\/github.com\/tuner007","followers_url":"https:\/\/api.github.com\/users\/tuner007\/followers","following_url":"https:\/\/api.github.com\/users\/tuner007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tuner007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tuner007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tuner007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tuner007\/orgs","repos_url":"https:\/\/api.github.com\/users\/tuner007\/repos","events_url":"https:\/\/api.github.com\/users\/tuner007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tuner007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T14:09:07Z","updated_at":"2020-12-06T17:41:28Z","closed_at":"2020-12-06T17:41:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1202","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1202","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1202.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1202.patch","merged_at":null},"body":"This dataset consists of 3048 similar and dissimilar medical question pairs hand-generated and labeled by Curai's doctors.\r\nDataset : https:\/\/github.com\/curai\/medical-question-pair-dataset\r\nPaper : https:\/\/drive.google.com\/file\/d\/1CHPGBXkvZuZc8hpr46HeHU6U6jnVze-s\/view\r\n**No splits added**","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1202\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1202\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1201","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1201\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1201\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1201\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1201","id":757927941,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTk3OTI2","number":1201,"title":"adding 
medical-questions-pairs","user":{"login":"tuner007","id":46425391,"node_id":"MDQ6VXNlcjQ2NDI1Mzkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46425391?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tuner007","html_url":"https:\/\/github.com\/tuner007","followers_url":"https:\/\/api.github.com\/users\/tuner007\/followers","following_url":"https:\/\/api.github.com\/users\/tuner007\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tuner007\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tuner007\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tuner007\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tuner007\/orgs","repos_url":"https:\/\/api.github.com\/users\/tuner007\/repos","events_url":"https:\/\/api.github.com\/users\/tuner007\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tuner007\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T13:36:52Z","updated_at":"2020-12-06T13:39:44Z","closed_at":"2020-12-06T13:39:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1201","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1201","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1201.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1201.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1201\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1201\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1200","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1200\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1200\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1200\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1200","id":757926823,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTk3MDk0","number":1200,"title":"Update 
ADD_NEW_DATASET.md","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T13:31:32Z","updated_at":"2020-12-07T08:32:39Z","closed_at":"2020-12-07T08:32:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1200","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1200","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1200.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1200.patch","merged_at":"2020-12-07T08:32:39Z"},"body":"Windows needs special treatment again: unfortunately adding `torch` to the requirements does not work well (crashing the installation). 
Users should first install torch manually and then continue with the other commands.\r\n\r\nThis issue arises all the time when adding torch as a dependency, but because so many novice users seem to participate in adding datasets, it may be useful to add an explicit note for Windows users to ensure that they do not run into issues.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1200\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1200\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1199","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1199\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1199\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1199\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1199","id":757909237,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTg0Nzk3","number":1199,"title":"Turkish NER dataset, script works fine, couldn't generate dummy data","user":{"login":"merveenoyan","id":53175384,"node_id":"MDQ6VXNlcjUzMTc1Mzg0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53175384?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/merveenoyan","html_url":"https:\/\/github.com\/merveenoyan","followers_url":"https:\/\/api.github.com\/users\/merveenoyan\/followers","following_url":"https:\/\/api.github.com\/users\/merveenoyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/merveenoyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/merveenoyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/merveenoyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/merveenoyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/merveenoyan\/repos","events_url":"https:\/\/api.github.com\/users\/merveenoyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/merveenoyan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-06T12:00:03Z","updated_at":"2020-12-16T16:13:24Z","closed_at":"2020-12-16T16:13:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1199","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1199","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1199.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1199.patch","merged_at":null},"body":"I've written the script (Turkish_NER.py) that includes dataset. The dataset is a zip inside another zip, and it's extracted as .DUMP file. However, after preprocessing I only get .arrow file. After I ran the script with no error messages, I get .arrow file of dataset, LICENSE and dataset_info.json. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1199\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1199\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1198","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1198\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1198\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1198\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1198","id":757903453,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTgwNjAz","number":1198,"title":"Add ALT","user":{"login":"chameleonTK","id":6429850,"node_id":"MDQ6VXNlcjY0Mjk4NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6429850?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chameleonTK","html_url":"https:\/\/github.com\/chameleonTK","followers_url":"https:\/\/api.github.com\/users\/chameleonTK\/followers","following_url":"https:\/\/api.github.com\/users\/chameleonTK\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chameleonTK\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chameleonTK\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chameleonTK\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chameleonTK\/orgs","repos_url":"https:\/\/api.github.com\/users\/chameleonTK\/repos","events_url":"https:\/\/api.github.com\/users\/chameleonTK\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chameleonTK\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-06T11:25:30Z","updated_at":"2020-12-10T04:18:12Z","closed_at":"2020-12-10T04:18:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1198","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1198","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1198.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1198.patch","merged_at":null},"body":"ALT dataset -- https:\/\/www2.nict.go.jp\/astrec-att\/member\/mutiyama\/ALT\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1198\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1198\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1197","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1197\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1197\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1197\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1197","id":757900160,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTc4MTIz","number":1197,"title":"add taskmaster-2","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T11:05:18Z","updated_at":"2020-12-07T15:22:43Z","closed_at":"2020-12-07T15:22:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1197","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1197","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1197.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1197.patch","merged_at":"2020-12-07T15:22:43Z"},"body":"Adding taskmaster-2 dataset.\r\nhttps:\/\/github.com\/google-research-datasets\/Taskmaster\/tree\/master\/TM-2-2020","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1197\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1197\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1196","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1196\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1196\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1196\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1196","id":757894920,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTc0NjU2","number":1196,"title":"Add IWSLT'15 English-Vietnamese machine translation 
Data","user":{"login":"Nilanshrajput","id":28673745,"node_id":"MDQ6VXNlcjI4NjczNzQ1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28673745?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Nilanshrajput","html_url":"https:\/\/github.com\/Nilanshrajput","followers_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/followers","following_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/orgs","repos_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/repos","events_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Nilanshrajput\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-06T10:36:31Z","updated_at":"2020-12-11T18:26:51Z","closed_at":"2020-12-11T18:26:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1196","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1196","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1196.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1196.patch","merged_at":"2020-12-11T18:26:51Z"},"body":"Preprocessed Dataset from IWSLT'15 English-Vietnamese machine translation: English-Vietnamese.\r\n\r\nfrom https:\/\/nlp.stanford.edu\/projects\/nmt\/data\/iwslt15.en-vi\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1196\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1196\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1195","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1195\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1195\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1195\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1195","id":757889045,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTcwMjY2","number":1195,"title":"addition of 
py_ast","user":{"login":"reshinthadithyan","id":36307201,"node_id":"MDQ6VXNlcjM2MzA3MjAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36307201?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/reshinthadithyan","html_url":"https:\/\/github.com\/reshinthadithyan","followers_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/followers","following_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/repos","events_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-06T10:00:52Z","updated_at":"2020-12-08T06:19:24Z","closed_at":"2020-12-08T06:19:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1195","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1195","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1195.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1195.patch","merged_at":null},"body":"The dataset consists of parsed Parsed ASTs that were used to train and evaluate the DeepSyn tool. 
\r\nThe Python programs are collected from GitHub repositories\r\nby removing duplicate files, removing project forks (copies of existing repositories),\r\nkeeping only programs that parse and have at most 30'000 nodes in the AST, and\r\naiming to remove obfuscated files","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1195\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1195\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1194","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1194\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1194\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1194\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1194","id":757880647,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTY0MDcz","number":1194,"title":"Add msr_text_compression","user":{"login":"jeromeku","id":2455711,"node_id":"MDQ6VXNlcjI0NTU3MTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2455711?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jeromeku","html_url":"https:\/\/github.com\/jeromeku","followers_url":"https:\/\/api.github.com\/users\/jeromeku\/followers","following_url":"https:\/\/api.github.com\/users\/jeromeku\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jeromeku\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jeromeku\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jeromeku\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jeromeku\/orgs","repos_url":"https:\/\/api.github.com\/users\/jeromeku\/repos","events_url":"https:\/\/api.github.com\/users\/jeromeku\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jeromeku\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-06T09:06:11Z","updated_at":"2020-12-09T10:53:45Z","closed_at":"2020-12-09T10:53:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1194","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1194","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1194.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1194.patch","merged_at":"2020-12-09T10:53:45Z"},"body":"Add [MSR Abstractive Text Compression Dataset](https:\/\/msropendata.com\/datasets\/f8ce2ec9-7fbd-48f7-a8bb-2d2279373563)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1194\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1194\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1193","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1193\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1193\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1193\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1193","id":757840830,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTM1NDAy","number":1193,"title":"add taskmaster-1","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T04:09:57Z","updated_at":"2020-12-07T15:23:24Z","closed_at":"2020-12-07T15:08:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1193","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1193","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1193.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1193.patch","merged_at":"2020-12-07T15:08:39Z"},"body":"Adding Taskmaster-1 dataset\r\nhttps:\/\/github.com\/google-research-datasets\/Taskmaster\/tree\/master\/TM-1-2019","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1193\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1193\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1192","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1192\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1192\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1192\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1192","id":757839671,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTM0NjI3","number":1192,"title":"Add NewsPH_NLI 
dataset","user":{"login":"anaerobeth","id":3663322,"node_id":"MDQ6VXNlcjM2NjMzMjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3663322?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anaerobeth","html_url":"https:\/\/github.com\/anaerobeth","followers_url":"https:\/\/api.github.com\/users\/anaerobeth\/followers","following_url":"https:\/\/api.github.com\/users\/anaerobeth\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anaerobeth\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anaerobeth\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anaerobeth\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anaerobeth\/orgs","repos_url":"https:\/\/api.github.com\/users\/anaerobeth\/repos","events_url":"https:\/\/api.github.com\/users\/anaerobeth\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anaerobeth\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T04:00:31Z","updated_at":"2020-12-07T15:39:43Z","closed_at":"2020-12-07T15:39:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1192","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1192","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1192.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1192.patch","merged_at":"2020-12-07T15:39:43Z"},"body":"This PR adds the NewsPH-NLI Dataset, the first benchmark dataset for sentence entailment in the low-resource Filipino language. Constructed through exploting the structure of news articles. 
Contains 600,000 premise-hypothesis pairs, in 70-15-15 split for training, validation, and testing.\r\n\r\nLink to the paper: https:\/\/arxiv.org\/pdf\/2010.11574.pdf\r\n\r\nLink to the dataset\/repo: https:\/\/github.com\/jcblaisecruz02\/Filipino-Text-Benchmarks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1192\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1192\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1191","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1191\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1191\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1191\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1191","id":757836654,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTMyNTg1","number":1191,"title":"Added Translator Human Parity Data For a Chinese-English news transla\u2026","user":{"login":"leoxzhao","id":7915719,"node_id":"MDQ6VXNlcjc5MTU3MTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7915719?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/leoxzhao","html_url":"https:\/\/github.com\/leoxzhao","followers_url":"https:\/\/api.github.com\/users\/leoxzhao\/followers","following_url":"https:\/\/api.github.com\/users\/leoxzhao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/leoxzhao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/leoxzhao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/leoxzhao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/leoxzhao\/orgs","repos_url":"https:\/\/api.github.com\/users\/leoxzhao\/repos","events_url":"https:\/\/api.github.com\/users\/leoxzhao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/leoxzhao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-06T03:34:13Z","updated_at":"2020-12-09T13:22:45Z","closed_at":"2020-12-09T13:22:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1191","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1191","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1191.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1191.patch","merged_at":"2020-12-09T13:22:45Z"},"body":"\u2026tion system from Open dataset list for Dataset sprint, Microsoft Datasets tab.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1191\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1191\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1190","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1190\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1190\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1190\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1190","id":757833698,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTMwNTM0","number":1190,"title":"Add Fake News Detection in Filipino dataset","user":{"login":"anaerobeth","id":3663322,"node_id":"MDQ6VXNlcjM2NjMzMjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3663322?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anaerobeth","html_url":"https:\/\/github.com\/anaerobeth","followers_url":"https:\/\/api.github.com\/users\/anaerobeth\/followers","following_url":"https:\/\/api.github.com\/users\/anaerobeth\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anaerobeth\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anaerobeth\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anaerobeth\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anaerobeth\/orgs","repos_url":"https:\/\/api.github.com\/users\/anaerobeth\/repos","events_url":"https:\/\/api.github.com\/users\/anaerobeth\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anaerobeth\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-06T03:12:15Z","updated_at":"2020-12-07T15:39:27Z","closed_at":"2020-12-07T15:39:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1190","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1190","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1190.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1190.patch","merged_at":"2020-12-07T15:39:27Z"},"body":"This PR adds the Fake News Filipino Dataset, a low-resource fake news detection corpora in Filipino. 
Contains 3,206 expertly-labeled news samples, half of which are real and half of which are fake.\r\n\r\nLink to the paper: http:\/\/www.lrec-conf.org\/proceedings\/lrec2020\/index.html\r\n\r\nLink to the dataset\/repo: https:\/\/github.com\/jcblaisecruz02\/Tagalog-fake-news","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1190\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1190\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1189","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1189\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1189\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1189\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1189","id":757831035,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTI4NjY1","number":1189,"title":"Add Dengue dataset in Filipino","user":{"login":"anaerobeth","id":3663322,"node_id":"MDQ6VXNlcjM2NjMzMjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3663322?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anaerobeth","html_url":"https:\/\/github.com\/anaerobeth","followers_url":"https:\/\/api.github.com\/users\/anaerobeth\/followers","following_url":"https:\/\/api.github.com\/users\/anaerobeth\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anaerobeth\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anaerobeth\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anaerobeth\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anaerobeth\/orgs","repos_url":"https:\/\/api.github.com\/users\/anaerobeth\/repos","events_url":"https:\/\/api.github.com\/users\/anaerobeth\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anaerobeth\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T02:50:47Z","updated_at":"2020-12-07T15:38:58Z","closed_at":"2020-12-07T15:38:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1189","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1189","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1189.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1189.patch","merged_at":"2020-12-07T15:38:58Z"},"body":"This PR adds the Dengue Dataset, a benchmark dataset for low-resource multiclass classification, with 4,015 training, 500 testing, and 500 validation examples, each labeled as part of five classes. Each sample can be a part of multiple classes. 
Collected as tweets.\r\n\r\nLink to the paper: https:\/\/ieeexplore.ieee.org\/document\/8459963\r\n\r\nLink to the dataset\/repo: https:\/\/github.com\/jcblaisecruz02\/Filipino-Text-Benchmarks\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1189\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1189\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1188","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1188\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1188\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1188\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1188","id":757827407,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTI2MTcw","number":1188,"title":"adding hind_encorp dataset","user":{"login":"rahul-art","id":56379013,"node_id":"MDQ6VXNlcjU2Mzc5MDEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56379013?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rahul-art","html_url":"https:\/\/github.com\/rahul-art","followers_url":"https:\/\/api.github.com\/users\/rahul-art\/followers","following_url":"https:\/\/api.github.com\/users\/rahul-art\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rahul-art\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rahul-art\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rahul-art\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rahul-art\/orgs","repos_url":"https:\/\/api.github.com\/users\/rahul-art\/repos","events_url":"https:\/\/api.github.com\/users\/rahul-art\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rahul-art\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":13,"created_at":"2020-12-06T02:18:45Z","updated_at":"2020-12-11T17:40:41Z","closed_at":"2020-12-11T17:40:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1188","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1188","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1188.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1188.patch","merged_at":null},"body":"adding Hindi_Encorp05 dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1188\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1188\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1187","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1187\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1187\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1187\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1187","id":757826707,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTI1NjU3","number":1187,"title":"Added AQUA-RAT (Algebra Question Answering with Rationales) Dataset","user":{"login":"arkhalid","id":14899066,"node_id":"MDQ6VXNlcjE0ODk5MDY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14899066?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arkhalid","html_url":"https:\/\/github.com\/arkhalid","followers_url":"https:\/\/api.github.com\/users\/arkhalid\/followers","following_url":"https:\/\/api.github.com\/users\/arkhalid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arkhalid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arkhalid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arkhalid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arkhalid\/orgs","repos_url":"https:\/\/api.github.com\/users\/arkhalid\/repos","events_url":"https:\/\/api.github.com\/users\/arkhalid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arkhalid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-06T02:12:52Z","updated_at":"2020-12-07T15:37:12Z","closed_at":"2020-12-07T15:37:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1187","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1187","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1187.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1187.patch","merged_at":"2020-12-07T15:37:12Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1187\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1187\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1186","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1186\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1186\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1186\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1186","id":757826660,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTI1NjE4","number":1186,"title":"all test passed 
","user":{"login":"rahul-art","id":56379013,"node_id":"MDQ6VXNlcjU2Mzc5MDEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56379013?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rahul-art","html_url":"https:\/\/github.com\/rahul-art","followers_url":"https:\/\/api.github.com\/users\/rahul-art\/followers","following_url":"https:\/\/api.github.com\/users\/rahul-art\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rahul-art\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rahul-art\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rahul-art\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rahul-art\/orgs","repos_url":"https:\/\/api.github.com\/users\/rahul-art\/repos","events_url":"https:\/\/api.github.com\/users\/rahul-art\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rahul-art\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-06T02:12:32Z","updated_at":"2020-12-07T15:06:55Z","closed_at":"2020-12-07T15:06:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1186","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1186","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1186.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1186.patch","merged_at":null},"body":"need help creating dummy data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1186\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1186\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1185","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1185\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1185\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1185\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1185","id":757825413,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTI0NzE1","number":1185,"title":"Add Hate Speech Dataset in 
Filipino","user":{"login":"anaerobeth","id":3663322,"node_id":"MDQ6VXNlcjM2NjMzMjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3663322?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anaerobeth","html_url":"https:\/\/github.com\/anaerobeth","followers_url":"https:\/\/api.github.com\/users\/anaerobeth\/followers","following_url":"https:\/\/api.github.com\/users\/anaerobeth\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anaerobeth\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anaerobeth\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anaerobeth\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anaerobeth\/orgs","repos_url":"https:\/\/api.github.com\/users\/anaerobeth\/repos","events_url":"https:\/\/api.github.com\/users\/anaerobeth\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anaerobeth\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-06T02:01:56Z","updated_at":"2020-12-07T15:35:33Z","closed_at":"2020-12-07T15:35:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1185","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1185","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1185.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1185.patch","merged_at":"2020-12-07T15:35:33Z"},"body":"This PR adds the Hate Speech Dataset, a text classification dataset in Filipino, consisting 10k tweets (training set) that are labeled as hate speech or non-hate speech. Released with 4,232 validation and 4,232 testing samples. 
Collected during the 2016 Philippine Presidential Elections.\r\n\r\nLink to the paper: https:\/\/pcj.csp.org.ph\/index.php\/pcj\/issue\/download\/29\/PCJ%20V14%20N1%20pp1-14%202019\r\n\r\nLink to the dataset\/repo: https:\/\/github.com\/jcblaisecruz02\/Filipino-Text-Benchmarks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1185\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1185\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1184","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1184\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1184\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1184\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1184","id":757807583,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTExNjk4","number":1184,"title":"Add Adversarial SQuAD dataset","user":{"login":"cceyda","id":15624271,"node_id":"MDQ6VXNlcjE1NjI0Mjcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15624271?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cceyda","html_url":"https:\/\/github.com\/cceyda","followers_url":"https:\/\/api.github.com\/users\/cceyda\/followers","following_url":"https:\/\/api.github.com\/users\/cceyda\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cceyda\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cceyda\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cceyda\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cceyda\/orgs","repos_url":"https:\/\/api.github.com\/users\/cceyda\/repos","events_url":"https:\/\/api.github.com\/users\/cceyda\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cceyda\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-05T23:51:57Z","updated_at":"2020-12-16T16:12:58Z","closed_at":"2020-12-16T16:12:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1184","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1184","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1184.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1184.patch","merged_at":"2020-12-16T16:12:58Z"},"body":"# Adversarial SQuAD\r\n\r\nAdding the Adversarial [SQuAD](https:\/\/github.com\/robinjia\/adversarial-squad) dataset as part of the sprint \ud83c\udf89 \r\nThis dataset adds adversarial sentences to a subset of the SQuAD dataset's dev examples. How to get the original squad example id is explained in readme->Data Instances. The whole data is intended for use in evaluation. (Which could of course be also used for training if one wants). 
So there is no classical train\/val\/test split, but a split based on the number of adversaries added.\r\n\r\nThere are 2 splits of this dataset:\r\n\r\n- AddSent: Has up to five candidate adversarial sentences that don't answer the question, but have a lot of words in common with the question. This adversary does not query the model in any way.\r\n- AddOneSent: Similar to AddSent, but just one candidate sentence was picked at random. This adversary does not query the model in any way.\r\n\r\n(The AddAny and AddCommon datasets mentioned in the paper are dynamically generated based on the model's output distribution and thus are not included here.)\r\n\r\nThe failing test looks like some unrelated timeout issue; it will probably clear if rerun.\r\n- [x] All tests passed\r\n- [x] Added dummy data\r\n- [x] Added data card (as much as I could)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1184\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1184\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1183","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1183\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1183\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1183\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1183","id":757806570,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTEwOTY4","number":1183,"title":"add mkb dataset","user":{"login":"vasudevgupta7","id":53136577,"node_id":"MDQ6VXNlcjUzMTM2NTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53136577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vasudevgupta7","html_url":"https:\/\/github.com\/vasudevgupta7","followers_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/followers","following_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/orgs","repos_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/repos","events_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-05T23:44:33Z","updated_at":"2020-12-09T09:38:50Z","closed_at":"2020-12-09T09:38:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1183","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1183","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1183.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1183.patch","merged_at":"2020-12-09T09:38:50Z"},"body":"This PR will add the Mann Ki Baat dataset (parallel data for Indian 
languages).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1183\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1183\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1182","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1182\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1182\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1182\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1182","id":757804877,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTA5Nzgx","number":1182,"title":"ADD COVID-QA dataset","user":{"login":"olinguyen","id":4341867,"node_id":"MDQ6VXNlcjQzNDE4Njc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4341867?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/olinguyen","html_url":"https:\/\/github.com\/olinguyen","followers_url":"https:\/\/api.github.com\/users\/olinguyen\/followers","following_url":"https:\/\/api.github.com\/users\/olinguyen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/olinguyen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/olinguyen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/olinguyen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/olinguyen\/orgs","repos_url":"https:\/\/api.github.com\/users\/olinguyen\/repos","events_url":"https:\/\/api.github.com\/users\/olinguyen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/olinguyen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-05T23:31:56Z","updated_at":"2020-12-28T13:23:14Z","closed_at":"2020-12-07T14:23:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1182","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1182","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1182.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1182.patch","merged_at":"2020-12-07T14:23:27Z"},"body":"This PR adds the COVID-QA dataset, a question answering dataset consisting of 2,019 question\/answer pairs annotated by volunteer biomedical experts on scientific articles related to COVID-19\r\n\r\nLink to the paper: https:\/\/openreview.net\/forum?id=JENSKEEzsoU\r\nLink to the dataset\/repo: https:\/\/github.com\/deepset-ai\/COVID-QA","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1182\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1182\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1181","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1181\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1181\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1181\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1181","id":757791992,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMTAwNjYz","number":1181,"title":"added emotions detection in arabic dataset","user":{"login":"abdulelahsm","id":28743265,"node_id":"MDQ6VXNlcjI4NzQzMjY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28743265?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abdulelahsm","html_url":"https:\/\/github.com\/abdulelahsm","followers_url":"https:\/\/api.github.com\/users\/abdulelahsm\/followers","following_url":"https:\/\/api.github.com\/users\/abdulelahsm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abdulelahsm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abdulelahsm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abdulelahsm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abdulelahsm\/orgs","repos_url":"https:\/\/api.github.com\/users\/abdulelahsm\/repos","events_url":"https:\/\/api.github.com\/users\/abdulelahsm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abdulelahsm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-05T22:08:46Z","updated_at":"2020-12-21T09:53:51Z","closed_at":"2020-12-21T09:53:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1181","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1181","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1181.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1181.patch","merged_at":"2020-12-21T09:53:51Z"},"body":"Dataset for Emotions detection in Arabic text\r\n\r\nmore info: https:\/\/github.com\/AmrMehasseb\/Emotional-Tone","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1181\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1181\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1180","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1180\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1180\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1180\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1180","id":757784612,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDk1MzI2","number":1180,"title":"Add KorQuAD v2 
Dataset","user":{"login":"cceyda","id":15624271,"node_id":"MDQ6VXNlcjE1NjI0Mjcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15624271?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cceyda","html_url":"https:\/\/github.com\/cceyda","followers_url":"https:\/\/api.github.com\/users\/cceyda\/followers","following_url":"https:\/\/api.github.com\/users\/cceyda\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cceyda\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cceyda\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cceyda\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cceyda\/orgs","repos_url":"https:\/\/api.github.com\/users\/cceyda\/repos","events_url":"https:\/\/api.github.com\/users\/cceyda\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cceyda\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-05T21:33:34Z","updated_at":"2020-12-16T16:10:30Z","closed_at":"2020-12-16T16:10:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1180","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1180","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1180.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1180.patch","merged_at":"2020-12-16T16:10:30Z"},"body":"# The Korean Question Answering Dataset v2\r\nAdding the [KorQuAD](https:\/\/korquad.github.io\/) v2 dataset as part of the sprint \ud83c\udf89 \r\nThis dataset is very similar to SQuAD and is an extension of [squad_kor_v1](https:\/\/github.com\/huggingface\/datasets\/pull\/1178) which is why I added it as `squad_kor_v2`. \r\n\r\n- Crowd generated questions and answer (1-answer per question) for Wikipedia articles. Differently from V1 it includes the html structure and markup, which makes it a different enough dataset. 
(doesn't share ids between v1 and v2 either)\r\n\r\n- [x] All tests passed\r\n- [x] Added dummy data\r\n- [x] Added data card (as much as I could)\r\n\r\nEdit: \ud83e\udd26 looks like squad_kor_v1 commit sneaked in here too","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1180\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1180\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1179","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1179\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1179\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1179\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1179","id":757784074,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDk0OTYz","number":1179,"title":"Small update to the doc: add flatten_indices in doc","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T21:30:10Z","updated_at":"2020-12-07T13:42:57Z","closed_at":"2020-12-07T13:42:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1179","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1179","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1179.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1179.patch","merged_at":"2020-12-07T13:42:56Z"},"body":"Small update to the doc: add flatten_indices in doc","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1179\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1179\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1178","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1178\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1178\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1178\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1178","id":757783435,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDk0NTIx","number":1178,"title":"Add KorQuAD v1 Dataset","user":{"login":"cceyda","id":15624271,"node_id":"MDQ6VXNlcjE1NjI0Mjcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15624271?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cceyda","html_url":"https:\/\/github.com\/cceyda","followers_url":"https:\/\/api.github.com\/users\/cceyda\/followers","following_url":"https:\/\/api.github.com\/users\/cceyda\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cceyda\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cceyda\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cceyda\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cceyda\/orgs","repos_url":"https:\/\/api.github.com\/users\/cceyda\/repos","events_url":"https:\/\/api.github.com\/users\/cceyda\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cceyda\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T21:25:46Z","updated_at":"2020-12-07T13:41:37Z","closed_at":"2020-12-07T13:41:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1178","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1178","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1178.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1178.patch","merged_at":"2020-12-07T13:41:37Z"},"body":"# The Korean Question Answering Dataset\r\nAdding the [KorQuAD](https:\/\/korquad.github.io\/KorQuad%201.0\/) v1 dataset as part of the sprint \ud83c\udf89 \r\nThis dataset is very similar to SQuAD which is why I added it as `squad_kor_v1`. 
There is also a v2 which I added [here](https:\/\/github.com\/huggingface\/datasets\/pull\/1180).\r\n\r\n- Crowd generated questions and answer (1-answer per question) for Wikipedia articles.\r\n\r\n- [x] All tests passed\r\n- [x] Added dummy data\r\n- [x] Added data card (as much as I could)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1178\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1178\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1177","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1177\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1177\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1177\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1177","id":757778684,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDkxMTQ3","number":1177,"title":"Add Korean NER dataset","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-05T20:56:00Z","updated_at":"2020-12-06T20:19:48Z","closed_at":"2020-12-06T20:19:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1177","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1177","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1177.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1177.patch","merged_at":null},"body":"This PR adds the [Korean named entity recognition dataset](https:\/\/github.com\/kmounlp\/NER). 
This dataset has been used in many downstream tasks, such as training [KoBERT](https:\/\/github.com\/SKTBrain\/KoBERT) for NER, as seen in this [KoBERT-CRF implementation](https:\/\/github.com\/eagle705\/pytorch-bert-crf-ner).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1177\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1177\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1176","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1176\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1176\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1176\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1176","id":757778365,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDkwOTMx","number":1176,"title":"Add OpenPI Dataset","user":{"login":"Bharat123rox","id":13381361,"node_id":"MDQ6VXNlcjEzMzgxMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13381361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Bharat123rox","html_url":"https:\/\/github.com\/Bharat123rox","followers_url":"https:\/\/api.github.com\/users\/Bharat123rox\/followers","following_url":"https:\/\/api.github.com\/users\/Bharat123rox\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Bharat123rox\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Bharat123rox\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Bharat123rox\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Bharat123rox\/orgs","repos_url":"https:\/\/api.github.com\/users\/Bharat123rox\/repos","events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-12-05T20:54:06Z","updated_at":"2021-09-06T09:19:50Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1176","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1176","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1176.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1176.patch","merged_at":null},"body":"Add the OpenPI Dataset by AI2 (AllenAI)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1176\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1176\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1175","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1175\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1175\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1175\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1175","id":757770077,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDg0OTYy","number":1175,"title":"added ReDial dataset","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-05T20:04:18Z","updated_at":"2020-12-07T13:21:43Z","closed_at":"2020-12-07T13:21:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1175","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1175","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1175.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1175.patch","merged_at":"2020-12-07T13:21:43Z"},"body":"Updating README\r\nDataset link: https:\/\/redialdata.github.io\/website\/datasheet","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1175\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1175\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1174","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1174\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1174\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1174\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1174","id":757768474,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDgzODUz","number":1174,"title":"Add Universal 
Morphologies","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-05T19:54:43Z","updated_at":"2021-01-26T16:50:16Z","closed_at":"2021-01-26T16:41:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1174","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1174","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1174.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1174.patch","merged_at":"2021-01-26T16:41:48Z"},"body":"Adding unimorph universal morphology annotations for 110 languages, pfew!!!\r\n\r\none lemma per row with all possible forms and annotations\r\n\r\nhttps:\/\/unimorph.github.io\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1174\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1174\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1173","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1173\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1173\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1173\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1173","id":757761967,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDc5MTk0","number":1173,"title":"add wikipedia biography 
dataset","user":{"login":"alejandrocros","id":39712560,"node_id":"MDQ6VXNlcjM5NzEyNTYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39712560?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alejandrocros","html_url":"https:\/\/github.com\/alejandrocros","followers_url":"https:\/\/api.github.com\/users\/alejandrocros\/followers","following_url":"https:\/\/api.github.com\/users\/alejandrocros\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alejandrocros\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alejandrocros\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alejandrocros\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alejandrocros\/orgs","repos_url":"https:\/\/api.github.com\/users\/alejandrocros\/repos","events_url":"https:\/\/api.github.com\/users\/alejandrocros\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alejandrocros\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-12-05T19:14:50Z","updated_at":"2020-12-07T11:13:14Z","closed_at":"2020-12-07T11:13:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1173","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1173","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1173.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1173.patch","merged_at":"2020-12-07T11:13:14Z"},"body":"My first PR containing the Wikipedia biographies dataset. I have followed all the steps in the [guide](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md). 
It passes all the tests.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1173\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1173\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1172","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1172\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1172\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1172\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1172","id":757758532,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDc2NzY3","number":1172,"title":"Add proto_qa dataset","user":{"login":"bpatidar","id":12439573,"node_id":"MDQ6VXNlcjEyNDM5NTcz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12439573?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bpatidar","html_url":"https:\/\/github.com\/bpatidar","followers_url":"https:\/\/api.github.com\/users\/bpatidar\/followers","following_url":"https:\/\/api.github.com\/users\/bpatidar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bpatidar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bpatidar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bpatidar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bpatidar\/orgs","repos_url":"https:\/\/api.github.com\/users\/bpatidar\/repos","events_url":"https:\/\/api.github.com\/users\/bpatidar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bpatidar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-05T18:55:04Z","updated_at":"2020-12-07T11:12:24Z","closed_at":"2020-12-07T11:12:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1172","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1172","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1172.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1172.patch","merged_at":"2020-12-07T11:12:24Z"},"body":"Added dataset tags as required.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1172\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1172\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1171","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1171\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1171\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1171\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1171","id":757757000,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDc1NzE3","number":1171,"title":"Add imdb Urdu Reviews 
dataset.","user":{"login":"chaitnayabasava","id":44389205,"node_id":"MDQ6VXNlcjQ0Mzg5MjA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44389205?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chaitnayabasava","html_url":"https:\/\/github.com\/chaitnayabasava","followers_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/followers","following_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/orgs","repos_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/repos","events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-05T18:46:05Z","updated_at":"2020-12-07T11:11:17Z","closed_at":"2020-12-07T11:11:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1171","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1171","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1171.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1171.patch","merged_at":"2020-12-07T11:11:16Z"},"body":"Added the imdb Urdu reviews dataset. More info about the dataset over here<\/a>.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1171\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1171\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1170","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1170\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1170\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1170\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1170","id":757754378,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDczOTU0","number":1170,"title":"Fix path handling for 
Windows","user":{"login":"edugp","id":17855740,"node_id":"MDQ6VXNlcjE3ODU1NzQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17855740?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/edugp","html_url":"https:\/\/github.com\/edugp","followers_url":"https:\/\/api.github.com\/users\/edugp\/followers","following_url":"https:\/\/api.github.com\/users\/edugp\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/edugp\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/edugp\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/edugp\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/edugp\/orgs","repos_url":"https:\/\/api.github.com\/users\/edugp\/repos","events_url":"https:\/\/api.github.com\/users\/edugp\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/edugp\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-05T18:31:54Z","updated_at":"2020-12-07T10:47:23Z","closed_at":"2020-12-07T10:47:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1170","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1170","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1170.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1170.patch","merged_at":"2020-12-07T10:47:23Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1170\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1170\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1169","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1169\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1169\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1169\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1169","id":757747997,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDY5MzAx","number":1169,"title":"Add Opus fiskmo dataset for Finnish and Swedish for MT 
task","user":{"login":"spatil6","id":6419011,"node_id":"MDQ6VXNlcjY0MTkwMTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6419011?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/spatil6","html_url":"https:\/\/github.com\/spatil6","followers_url":"https:\/\/api.github.com\/users\/spatil6\/followers","following_url":"https:\/\/api.github.com\/users\/spatil6\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/spatil6\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/spatil6\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/spatil6\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/spatil6\/orgs","repos_url":"https:\/\/api.github.com\/users\/spatil6\/repos","events_url":"https:\/\/api.github.com\/users\/spatil6\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/spatil6\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-05T17:56:55Z","updated_at":"2020-12-07T11:04:11Z","closed_at":"2020-12-07T11:04:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1169","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1169","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1169.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1169.patch","merged_at":"2020-12-07T11:04:11Z"},"body":"Adding fiskmo, a massive parallel corpus for Finnish and Swedish.\r\nfor more info : http:\/\/opus.nlpl.eu\/fiskmo.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1169\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1169\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1168","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1168\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1168\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1168\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1168","id":757740780,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDYzNjgy","number":1168,"title":"Add Naver sentiment movie 
corpus","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-05T17:25:23Z","updated_at":"2020-12-07T13:34:09Z","closed_at":"2020-12-07T13:34:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1168","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1168","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1168.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1168.patch","merged_at":null},"body":"This PR adds the [Naver sentiment movie corpus](https:\/\/github.com\/e9t\/nsmc), a dataset containing Korean movie reviews from Naver, the most commonly used search engine in Korea. This dataset is often used to benchmark models on Korean NLP tasks, as seen in [this paper](https:\/\/www.aclweb.org\/anthology\/2020.lrec-1.199.pdf). 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1168\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1168\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1167","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1167\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1167\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1167\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1167","id":757722921,"node_id":"MDU6SXNzdWU3NTc3MjI5MjE=","number":1167,"title":"\u2753 On-the-fly tokenization with datasets, tokenizers, and torch Datasets and Dataloaders","user":{"login":"pietrolesci","id":61748653,"node_id":"MDQ6VXNlcjYxNzQ4NjUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/61748653?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pietrolesci","html_url":"https:\/\/github.com\/pietrolesci","followers_url":"https:\/\/api.github.com\/users\/pietrolesci\/followers","following_url":"https:\/\/api.github.com\/users\/pietrolesci\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pietrolesci\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pietrolesci\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pietrolesci\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pietrolesci\/orgs","repos_url":"https:\/\/api.github.com\/users\/pietrolesci\/repos","events_url":"https:\/\/api.github.com\/users\/pietrolesci\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pietrolesci\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-05T17:02:56Z","updated_at":"2021-01-13T10:48:08Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi there,\r\n\r\nI have a question regarding \"on-the-fly\" tokenization. This question was elicited by reading the \"How to train a new language model from scratch using Transformers and Tokenizers\" [here](https:\/\/huggingface.co\/blog\/how-to-train). Towards the end there is this sentence: \"If your dataset is very large, you can opt to load and tokenize examples on the fly, rather than as a preprocessing step\". 
I've tried coming up with a solution that would combine both `datasets` and `tokenizers`, but did not manage to find a good pattern.\r\n\r\nI guess the solution would entail wrapping a dataset into a Pytorch dataset.\r\n\r\nAs a concrete example from the [docs](https:\/\/huggingface.co\/transformers\/custom_datasets.html)\r\n\r\n```python\r\nimport torch\r\n\r\nclass SquadDataset(torch.utils.data.Dataset):\r\n def __init__(self, encodings):\r\n # instead of doing this beforehand, I'd like to do tokenization on the fly\r\n self.encodings = encodings \r\n\r\n def __getitem__(self, idx):\r\n return {key: torch.tensor(val[idx]) for key, val in self.encodings.items()}\r\n\r\n def __len__(self):\r\n return len(self.encodings.input_ids)\r\n\r\ntrain_dataset = SquadDataset(train_encodings)\r\n```\r\n\r\nHow would one implement this with \"on-the-fly\" tokenization exploiting the vectorized capabilities of tokenizers?\r\n\r\n\r\n----\r\n\r\nEdit: I have come up with this solution. It does what I want, but I feel it's not very elegant\r\n\r\n```python\r\nclass CustomPytorchDataset(Dataset):\r\n def __init__(self):\r\n self.dataset = some_hf_dataset(...)\r\n self.tokenizer = BertTokenizerFast.from_pretrained(\"bert-base-uncased\")\r\n\r\n def __getitem__(self, batch_idx):\r\n instance = self.dataset[text_col][batch_idx]\r\n tokenized_text = self.tokenizer(instance, truncation=True, padding=True)\r\n return tokenized_text\r\n\r\n def __len__(self):\r\n return len(self.dataset)\r\n\r\n @staticmethod\r\n def collate_fn(batch):\r\n # batch is a list, however it will always contain 1 item because we should not use the\r\n # batch_size argument as batch_size is controlled by the sampler\r\n return {k: torch.tensor(v) for k, v in batch[0].items()}\r\n\r\ntorch_ds = CustomPytorchDataset()\r\n\r\n# NOTE: batch_sampler returns list of integers and since here we have SequentialSampler\r\n# it returns: [1, 2, 3], [4, 5, 6], etc. 
- check calling `list(batch_sampler)`\r\nbatch_sampler = BatchSampler(SequentialSampler(torch_ds), batch_size=3, drop_last=True)\r\n\r\n# NOTE: no `batch_size` as now the it is controlled by the sampler!\r\ndl = DataLoader(dataset=torch_ds, sampler=batch_sampler, collate_fn=torch_ds.collate_fn)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1167\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1167\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1166","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1166\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1166\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1166\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1166","id":757721208,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDQ1NDUy","number":1166,"title":"Opus montenegrinsubs","user":{"login":"spatil6","id":6419011,"node_id":"MDQ6VXNlcjY0MTkwMTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6419011?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/spatil6","html_url":"https:\/\/github.com\/spatil6","followers_url":"https:\/\/api.github.com\/users\/spatil6\/followers","following_url":"https:\/\/api.github.com\/users\/spatil6\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/spatil6\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/spatil6\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/spatil6\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/spatil6\/orgs","repos_url":"https:\/\/api.github.com\/users\/spatil6\/repos","events_url":"https:\/\/api.github.com\/users\/spatil6\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/spatil6\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-05T17:00:44Z","updated_at":"2020-12-07T11:02:49Z","closed_at":"2020-12-07T11:02:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1166","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1166","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1166.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1166.patch","merged_at":"2020-12-07T11:02:49Z"},"body":"Opus montenegrinsubs - language pair en-me\r\nmore info : http:\/\/opus.nlpl.eu\/MontenegrinSubs.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1166\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1166\/timeline","performed_via_github_app":null} 
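Issue #1167 above asks how to combine `datasets` and `tokenizers` for on-the-fly tokenization; the thread's own workaround routes a `BatchSampler` through the `sampler` argument of `DataLoader`. Below is a minimal illustrative sketch of one common alternative pattern, not taken from the thread: keep raw strings in the wrapping `torch.utils.data.Dataset` and tokenize whole batches inside `collate_fn`, so the fast tokenizer still encodes many examples at once. The dataset name (`imdb`), text column (`text`), and checkpoint (`bert-base-uncased`) are placeholder assumptions.

```python
# Hypothetical sketch: on-the-fly, batched tokenization via collate_fn.
# Dataset, column, and checkpoint names are placeholders, not from the thread.
from datasets import load_dataset
from torch.utils.data import DataLoader, Dataset
from transformers import AutoTokenizer


class RawTextDataset(Dataset):
    """Wraps a Hugging Face dataset and returns raw strings; tokenization happens per batch."""

    def __init__(self, hf_dataset, text_column="text"):
        self.dataset = hf_dataset
        self.text_column = text_column

    def __getitem__(self, idx):
        return self.dataset[idx][self.text_column]

    def __len__(self):
        return len(self.dataset)


tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")


def collate_fn(batch):
    # `batch` is a list of strings; the fast tokenizer encodes them together.
    return tokenizer(batch, truncation=True, padding=True, return_tensors="pt")


torch_ds = RawTextDataset(load_dataset("imdb", split="train"))
loader = DataLoader(torch_ds, batch_size=8, shuffle=True, collate_fn=collate_fn)

for encodings in loader:
    print(encodings["input_ids"].shape)  # tokenized lazily, one batch at a time
    break
```

Compared with the `BatchSampler`-as-`sampler` trick in the thread, this sketch keeps the standard `batch_size` and `shuffle` arguments of `DataLoader` intact while still tokenizing lazily and in vectorized batches.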
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1165","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1165\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1165\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1165\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1165","id":757720226,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDQ0NzEy","number":1165,"title":"Add ar rest reviews","user":{"login":"abdulelahsm","id":28743265,"node_id":"MDQ6VXNlcjI4NzQzMjY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28743265?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abdulelahsm","html_url":"https:\/\/github.com\/abdulelahsm","followers_url":"https:\/\/api.github.com\/users\/abdulelahsm\/followers","following_url":"https:\/\/api.github.com\/users\/abdulelahsm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abdulelahsm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abdulelahsm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abdulelahsm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abdulelahsm\/orgs","repos_url":"https:\/\/api.github.com\/users\/abdulelahsm\/repos","events_url":"https:\/\/api.github.com\/users\/abdulelahsm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abdulelahsm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-12-05T16:56:42Z","updated_at":"2020-12-21T17:06:23Z","closed_at":"2020-12-21T17:06:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1165","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1165","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1165.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1165.patch","merged_at":"2020-12-21T17:06:23Z"},"body":"added restaurants reviews in Arabic for sentiment analysis tasks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1165\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1165\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1164","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1164\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1164\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1164\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1164","id":757716575,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDQyMjA1","number":1164,"title":"Add DaNe 
dataset","user":{"login":"ophelielacroix","id":28562991,"node_id":"MDQ6VXNlcjI4NTYyOTkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28562991?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ophelielacroix","html_url":"https:\/\/github.com\/ophelielacroix","followers_url":"https:\/\/api.github.com\/users\/ophelielacroix\/followers","following_url":"https:\/\/api.github.com\/users\/ophelielacroix\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ophelielacroix\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ophelielacroix\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ophelielacroix\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ophelielacroix\/orgs","repos_url":"https:\/\/api.github.com\/users\/ophelielacroix\/repos","events_url":"https:\/\/api.github.com\/users\/ophelielacroix\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ophelielacroix\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-05T16:36:50Z","updated_at":"2020-12-08T12:50:18Z","closed_at":"2020-12-08T12:49:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1164","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1164","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1164.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1164.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1164\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1164\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1163","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1163\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1163\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1163\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1163","id":757711340,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDM4Mzc3","number":1163,"title":"Added memat : Xhosa-English parallel 
corpora","user":{"login":"spatil6","id":6419011,"node_id":"MDQ6VXNlcjY0MTkwMTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6419011?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/spatil6","html_url":"https:\/\/github.com\/spatil6","followers_url":"https:\/\/api.github.com\/users\/spatil6\/followers","following_url":"https:\/\/api.github.com\/users\/spatil6\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/spatil6\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/spatil6\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/spatil6\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/spatil6\/orgs","repos_url":"https:\/\/api.github.com\/users\/spatil6\/repos","events_url":"https:\/\/api.github.com\/users\/spatil6\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/spatil6\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-05T16:08:50Z","updated_at":"2020-12-07T10:40:24Z","closed_at":"2020-12-07T10:40:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1163","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1163","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1163.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1163.patch","merged_at":"2020-12-07T10:40:24Z"},"body":"Added memat : Xhosa-English parallel corpora\r\nfor more info : http:\/\/opus.nlpl.eu\/memat.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1163\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1163\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1162","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1162\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1162\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1162\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1162","id":757707085,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDM1MzEw","number":1162,"title":"Add Mocha 
dataset","user":{"login":"mattbui","id":46804938,"node_id":"MDQ6VXNlcjQ2ODA0OTM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46804938?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mattbui","html_url":"https:\/\/github.com\/mattbui","followers_url":"https:\/\/api.github.com\/users\/mattbui\/followers","following_url":"https:\/\/api.github.com\/users\/mattbui\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mattbui\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mattbui\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mattbui\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mattbui\/orgs","repos_url":"https:\/\/api.github.com\/users\/mattbui\/repos","events_url":"https:\/\/api.github.com\/users\/mattbui\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mattbui\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T15:45:14Z","updated_at":"2020-12-07T10:09:39Z","closed_at":"2020-12-07T10:09:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1162","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1162","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1162.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1162.patch","merged_at":"2020-12-07T10:09:39Z"},"body":"More information: https:\/\/allennlp.org\/mocha","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1162\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1162\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1161","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1161\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1161\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1161\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1161","id":757705286,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDM0MDM3","number":1161,"title":"Linguisticprobing","user":{"login":"sileod","id":9168444,"node_id":"MDQ6VXNlcjkxNjg0NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9168444?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sileod","html_url":"https:\/\/github.com\/sileod","followers_url":"https:\/\/api.github.com\/users\/sileod\/followers","following_url":"https:\/\/api.github.com\/users\/sileod\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sileod\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sileod\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sileod\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sileod\/orgs","repos_url":"https:\/\/api.github.com\/users\/sileod\/repos","events_url":"https:\/\/api.github.com\/users\/sileod\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sileod\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T15:35:18Z","updated_at":"2020-12-09T16:38:26Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1161","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1161","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1161.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1161.patch","merged_at":null},"body":"Adding Linguistic probing datasets from\r\nWhat you can cram into a single $&!#* vector: Probing sentence embeddings for linguistic properties\r\n https:\/\/www.aclweb.org\/anthology\/P18-1198\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1161\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1161\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1160","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1160\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1160\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1160\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1160","id":757677188,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDE0Nzcw","number":1160,"title":"adding TabFact 
dataset","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-05T13:05:52Z","updated_at":"2020-12-09T11:41:39Z","closed_at":"2020-12-09T09:12:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1160","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1160","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1160.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1160.patch","merged_at":"2020-12-09T09:12:40Z"},"body":"Adding TabFact: A Large-scale Dataset for Table-based Fact Verification.\r\n\r\nhttps:\/\/github.com\/wenhuchen\/Table-Fact-Checking\r\n\r\n- The tables are stored as individual csv files, so need to download 16,573 \ud83e\udd2f csv files. 
As a result the `datasets_infos.json` file is huge (6.62 MB).\r\n- Original dataset has nested structure where, where table is one example and each table has multiple statements,\r\nflattening the structure here so that each statement is one example.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1160\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1160\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1159","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1159\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1159\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1159\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1159","id":757661128,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDAyNzYx","number":1159,"title":"Add Roman Urdu dataset","user":{"login":"jaketae","id":25360440,"node_id":"MDQ6VXNlcjI1MzYwNDQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25360440?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jaketae","html_url":"https:\/\/github.com\/jaketae","followers_url":"https:\/\/api.github.com\/users\/jaketae\/followers","following_url":"https:\/\/api.github.com\/users\/jaketae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jaketae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jaketae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jaketae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jaketae\/orgs","repos_url":"https:\/\/api.github.com\/users\/jaketae\/repos","events_url":"https:\/\/api.github.com\/users\/jaketae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jaketae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T11:36:43Z","updated_at":"2020-12-07T13:41:21Z","closed_at":"2020-12-07T09:59:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1159","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1159","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1159.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1159.patch","merged_at":"2020-12-07T09:59:03Z"},"body":"This PR adds the [Roman Urdu dataset](https:\/\/archive.ics.uci.edu\/ml\/datasets\/Roman+Urdu+Data+Set#). 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1159\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1159\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1158","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1158\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1158\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1158\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1158","id":757658926,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDAxMjM0","number":1158,"title":"Add BBC Hindi NLI Dataset ","user":{"login":"avinsit123","id":33565881,"node_id":"MDQ6VXNlcjMzNTY1ODgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33565881?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/avinsit123","html_url":"https:\/\/github.com\/avinsit123","followers_url":"https:\/\/api.github.com\/users\/avinsit123\/followers","following_url":"https:\/\/api.github.com\/users\/avinsit123\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/avinsit123\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/avinsit123\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/avinsit123\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/avinsit123\/orgs","repos_url":"https:\/\/api.github.com\/users\/avinsit123\/repos","events_url":"https:\/\/api.github.com\/users\/avinsit123\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/avinsit123\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-12-05T11:25:34Z","updated_at":"2021-02-05T09:48:31Z","closed_at":"2021-02-05T09:48:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1158","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1158","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1158.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1158.patch","merged_at":"2021-02-05T09:48:31Z"},"body":"# Dataset Card for BBC Hindi NLI Dataset\r\n\r\n## Table of Contents\r\n- [Dataset Description](#dataset-description)\r\n - [Dataset Summary](#dataset-summary)\r\n - [Supported Tasks](#supported-tasks-and-leaderboards)\r\n - [Languages](#languages)\r\n- [Dataset Structure](#dataset-structure)\r\n - [Data Instances](#data-instances)\r\n - [Data Fields](#data-fields)\r\n - [Data Splits](#data-splits)\r\n- [Dataset Creation](#dataset-creation)\r\n - [Curation Rationale](#curation-rationale)\r\n - [Source Data](#source-data)\r\n - [Annotations](#annotations)\r\n - [Personal and Sensitive Information](#personal-and-sensitive-information)\r\n- [Considerations for Using the Data](#considerations-for-using-the-data)\r\n - [Social Impact of Dataset](#social-impact-of-dataset)\r\n - [Discussion of Biases](#discussion-of-biases)\r\n - [Other Known Limitations](#other-known-limitations)\r\n- [Additional Information](#additional-information)\r\n - [Dataset 
Curators](#dataset-curators)\r\n - [Licensing Information](#licensing-information)\r\n - [Citation Information](#citation-information)\r\n\r\n## Dataset Description\r\n\r\n- HomePage : https:\/\/github.com\/midas-research\/hindi-nli-data\r\n- Paper : \"https:\/\/www.aclweb.org\/anthology\/2020.aacl-main.71\"\r\n- Point of Contact : https:\/\/github.com\/midas-research\/hindi-nli-data\r\n\r\n### Dataset Summary\r\n\r\n- Dataset for Natural Language Inference in Hindi Language. BBC Hindi Dataset consists of textual-entailment pairs.\r\n- Each row of the Datasets if made up of 4 columns - Premise, Hypothesis, Label and Topic.\r\n- Context and Hypothesis is written in Hindi while Entailment_Label is in English.\r\n- Entailment_label is of 2 types - entailed and not-entailed.\r\n- Dataset can be used to train models for Natural Language Inference tasks in Hindi Language.\r\n[More Information Needed]\r\n\r\n### Supported Tasks and Leaderboards\r\n\r\n- Natural Language Inference for Hindi\r\n\r\n### Languages\r\n\r\nDataset is in Hindi\r\n\r\n## Dataset Structure\r\n\r\n- Data is structured in TSV format. \r\n- Train and Test files are in seperate files\r\n\r\n\r\n### Dataset Instances\r\n\r\nAn example of 'train' looks as follows.\r\n\r\n```\r\n{'hypothesis': '\u092f\u0939 \u0916\u092c\u0930 \u0915\u0940 \u0938\u0942\u091a\u0928\u093e \u0939\u0948|', 'label': 'entailed', 'premise': '\u0917\u094b\u092a\u0928\u0940\u092f\u0924\u093e \u0915\u0940 \u0928\u0940\u0924\u093f', 'topic': '1'}\r\n\r\n```\r\n### Data Fields\r\n\r\n- Each row contatins 4 columns - Premise, Hypothesis, Label and Topic.\r\n\r\n### Data Splits\r\n\r\n- Train : 15553\r\n- Valid : 2581\r\n- Test : 2593\r\n\r\n## Dataset Creation\r\n\r\n- We employ a recasting technique from Poliak et al. 
(2018a,b) to convert publicly available BBC Hindi news text classification datasets in Hindi and pose them as TE problems\r\n- In this recasting process, we build template hypotheses for each class in the label taxonomy\r\n- Then, we pair the original annotated sentence with each of the template hypotheses to create TE samples.\r\n- For more information on the recasting process, refer to paper \"https:\/\/www.aclweb.org\/anthology\/2020.aacl-main.71\"\r\n\r\n### Source Data\r\n\r\nSource Dataset for the recasting process is the BBC Hindi Headlines Dataset(https:\/\/github.com\/NirantK\/hindi2vec\/releases\/tag\/bbc-hindi-v0.1)\r\n\r\n#### Initial Data Collection and Normalization\r\n\r\n- BBC Hindi News Classification Dataset contains 4, 335 Hindi news headlines tagged across 14 categories: India, Pakistan,news, International, entertainment, sport, science, China, learning english, social, southasia, business, institutional, multimedia\r\n- We processed this dataset to combine two sets of relevant but low prevalence classes.\r\n- Namely, we merged the samples from Pakistan, China, international, and southasia as one class called international.\r\n- Likewise, we also merged samples from news, business, social, learning english, and institutional as news.\r\n- Lastly, we also removed the class multimedia because there were very few samples.\r\n\r\n#### Who are the source language producers?\r\n\r\nPls refer to this paper: \"https:\/\/www.aclweb.org\/anthology\/2020.aacl-main.71\"\r\n\r\n### Annotations\r\n\r\n#### Annotation process\r\n\r\nAnnotation process has been described in Dataset Creation Section.\r\n\r\n#### Who are the annotators?\r\n\r\nAnnotation is done automatically.\r\n\r\n### Personal and Sensitive Information\r\n\r\nNo Personal and Sensitive Information is mentioned in the Datasets.\r\n\r\n## Considerations for Using the Data\r\n\r\nPls refer to this paper: https:\/\/www.aclweb.org\/anthology\/2020.aacl-main.71\r\n\r\n### Discussion of Biases\r\n\r\nPls refer to this paper: https:\/\/www.aclweb.org\/anthology\/2020.aacl-main.71\r\n\r\n### Other Known Limitations\r\n\r\nNo other known limitations\r\n\r\n## Additional Information\r\n\r\nPls refer to this link: https:\/\/github.com\/midas-research\/hindi-nli-data\r\n\r\n### Dataset Curators\r\n\r\nIt is written in the repo : https:\/\/github.com\/avinsit123\/hindi-nli-data that \r\n- This corpus can be used freely for research purposes.\r\n- The paper listed below provide details of the creation and use of the corpus. If you use the corpus, then please cite the paper.\r\n- If interested in commercial use of the corpus, send email to midas@iiitd.ac.in.\r\n- If you use the corpus in a product or application, then please credit the authors and Multimodal Digital Media Analysis Lab - Indraprastha Institute of Information Technology, New Delhi appropriately. Also, if you send us an email, we will be thrilled to know about how you have used the corpus.\r\n- Multimodal Digital Media Analysis Lab - Indraprastha Institute of Information Technology, New Delhi, India disclaims any responsibility for the use of the corpus and does not provide technical support. 
However, the contact listed above will be happy to respond to queries and clarifications.\r\n- Rather than redistributing the corpus, please direct interested parties to this page\r\n- Please feel free to send us an email:\r\n - with feedback regarding the corpus.\r\n - with information on how you have used the corpus.\r\n - if interested in having us analyze your data for natural language inference.\r\n - if interested in a collaborative research project.\r\n\r\n\r\n### Licensing Information\r\n\r\nCopyright (C) 2019 Multimodal Digital Media Analysis Lab - Indraprastha Institute of Information Technology, New Delhi (MIDAS, IIIT-Delhi).\r\nPls contact authors for any information on the dataset.\r\n\r\n### Citation Information\r\n\r\n```\r\n @inproceedings{uppal-etal-2020-two,\r\n title = \"Two-Step Classification using Recasted Data for Low Resource Settings\",\r\n author = \"Uppal, Shagun and\r\n Gupta, Vivek and\r\n Swaminathan, Avinash and\r\n Zhang, Haimin and\r\n Mahata, Debanjan and\r\n Gosangi, Rakesh and\r\n Shah, Rajiv Ratn and\r\n Stent, Amanda\",\r\n booktitle = \"Proceedings of the 1st Conference of the Asia-Pacific Chapter of the Association for Computational Linguistics and the 10th International Joint Conference on Natural Language Processing\",\r\n month = dec,\r\n year = \"2020\",\r\n address = \"Suzhou, China\",\r\n publisher = \"Association for Computational Linguistics\",\r\n url = \"https:\/\/www.aclweb.org\/anthology\/2020.aacl-main.71\",\r\n pages = \"706--719\",\r\n abstract = \"An NLP model{'}s ability to reason should be independent of language. Previous works utilize Natural Language Inference (NLI) to understand the reasoning ability of models, mostly focusing on high resource languages like English. To address scarcity of data in low-resource languages such as Hindi, we use data recasting to create NLI datasets for four existing text classification datasets. Through experiments, we show that our recasted dataset is devoid of statistical irregularities and spurious patterns. We further study the consistency in predictions of the textual entailment models and propose a consistency regulariser to remove pairwise-inconsistencies in predictions. We propose a novel two-step classification method which uses textual-entailment predictions for classification task. We further improve the performance by using a joint-objective for classification and textual entailment. 
We therefore highlight the benefits of data recasting and improvements on classification performance using our approach with supporting experimental results.\",\r\n}\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1158\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1158\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1157","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1157\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1157\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1157\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1157","id":757657888,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMzMDAwNDQy","number":1157,"title":"Add dataset XhosaNavy English -Xhosa","user":{"login":"spatil6","id":6419011,"node_id":"MDQ6VXNlcjY0MTkwMTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6419011?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/spatil6","html_url":"https:\/\/github.com\/spatil6","followers_url":"https:\/\/api.github.com\/users\/spatil6\/followers","following_url":"https:\/\/api.github.com\/users\/spatil6\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/spatil6\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/spatil6\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/spatil6\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/spatil6\/orgs","repos_url":"https:\/\/api.github.com\/users\/spatil6\/repos","events_url":"https:\/\/api.github.com\/users\/spatil6\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/spatil6\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T11:19:54Z","updated_at":"2020-12-07T09:11:33Z","closed_at":"2020-12-07T09:11:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1157","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1157","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1157.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1157.patch","merged_at":"2020-12-07T09:11:33Z"},"body":"Add dataset XhosaNavy English -Xhosa\r\nMore info : http:\/\/opus.nlpl.eu\/XhosaNavy.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1157\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1157\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1156","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1156\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1156\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1156\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1156","id":757656094,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyOTk5MTQ1","number":1156,"title":"add telugu-news corpus","user":{"login":"oostopitre","id":3135345,"node_id":"MDQ6VXNlcjMxMzUzNDU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3135345?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/oostopitre","html_url":"https:\/\/github.com\/oostopitre","followers_url":"https:\/\/api.github.com\/users\/oostopitre\/followers","following_url":"https:\/\/api.github.com\/users\/oostopitre\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/oostopitre\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/oostopitre\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/oostopitre\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/oostopitre\/orgs","repos_url":"https:\/\/api.github.com\/users\/oostopitre\/repos","events_url":"https:\/\/api.github.com\/users\/oostopitre\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/oostopitre\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T11:07:56Z","updated_at":"2020-12-07T09:08:48Z","closed_at":"2020-12-07T09:08:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1156","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1156","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1156.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1156.patch","merged_at":"2020-12-07T09:08:48Z"},"body":"Adding Telugu News Corpus to datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1156\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1156\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1155","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1155\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1155\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1155\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1155","id":757652517,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyOTk2NjQ2","number":1155,"title":"Add BSD 
","user":{"login":"j-chim","id":22435209,"node_id":"MDQ6VXNlcjIyNDM1MjA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22435209?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/j-chim","html_url":"https:\/\/github.com\/j-chim","followers_url":"https:\/\/api.github.com\/users\/j-chim\/followers","following_url":"https:\/\/api.github.com\/users\/j-chim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/j-chim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/j-chim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/j-chim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/j-chim\/orgs","repos_url":"https:\/\/api.github.com\/users\/j-chim\/repos","events_url":"https:\/\/api.github.com\/users\/j-chim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/j-chim\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-05T10:43:48Z","updated_at":"2020-12-07T09:27:46Z","closed_at":"2020-12-07T09:27:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1155","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1155","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1155.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1155.patch","merged_at":"2020-12-07T09:27:46Z"},"body":"This PR adds BSD, the Japanese-English business dialogue corpus by \r\n[Rikters et al., 2020](https:\/\/www.aclweb.org\/anthology\/D19-5204.pdf). ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1155\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1155\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1154","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1154\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1154\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1154\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1154","id":757651669,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyOTk2MDQ3","number":1154,"title":"Opus 
sardware","user":{"login":"spatil6","id":6419011,"node_id":"MDQ6VXNlcjY0MTkwMTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6419011?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/spatil6","html_url":"https:\/\/github.com\/spatil6","followers_url":"https:\/\/api.github.com\/users\/spatil6\/followers","following_url":"https:\/\/api.github.com\/users\/spatil6\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/spatil6\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/spatil6\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/spatil6\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/spatil6\/orgs","repos_url":"https:\/\/api.github.com\/users\/spatil6\/repos","events_url":"https:\/\/api.github.com\/users\/spatil6\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/spatil6\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T10:38:02Z","updated_at":"2020-12-05T17:05:45Z","closed_at":"2020-12-05T17:05:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1154","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1154","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1154.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1154.patch","merged_at":"2020-12-05T17:05:45Z"},"body":"Added Opus sardware dataset for machine translation English to Sardinian.\r\nfor more info : http:\/\/opus.nlpl.eu\/sardware.php","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1154\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1154\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1153","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1153\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1153\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1153\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1153","id":757643302,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyOTkwMTk4","number":1153,"title":"Adding dataset for proto_qa in huggingface datasets 
library","user":{"login":"bpatidar","id":12439573,"node_id":"MDQ6VXNlcjEyNDM5NTcz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12439573?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bpatidar","html_url":"https:\/\/github.com\/bpatidar","followers_url":"https:\/\/api.github.com\/users\/bpatidar\/followers","following_url":"https:\/\/api.github.com\/users\/bpatidar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bpatidar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bpatidar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bpatidar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bpatidar\/orgs","repos_url":"https:\/\/api.github.com\/users\/bpatidar\/repos","events_url":"https:\/\/api.github.com\/users\/bpatidar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bpatidar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T09:43:28Z","updated_at":"2020-12-05T18:53:10Z","closed_at":"2020-12-05T18:53:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1153","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1153","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1153.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1153.patch","merged_at":null},"body":"Added dataset for ProtoQA: A Question Answering Dataset for Prototypical Common-Sense Reasoning\r\nFollowed all steps for adding a new dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1153\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1153\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1152","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1152\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1152\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1152\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1152","id":757640506,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyOTg4MjMw","number":1152,"title":"hindi discourse analysis dataset 
commit","user":{"login":"duttahritwik","id":31453142,"node_id":"MDQ6VXNlcjMxNDUzMTQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31453142?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/duttahritwik","html_url":"https:\/\/github.com\/duttahritwik","followers_url":"https:\/\/api.github.com\/users\/duttahritwik\/followers","following_url":"https:\/\/api.github.com\/users\/duttahritwik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/duttahritwik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/duttahritwik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/duttahritwik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/duttahritwik\/orgs","repos_url":"https:\/\/api.github.com\/users\/duttahritwik\/repos","events_url":"https:\/\/api.github.com\/users\/duttahritwik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/duttahritwik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2020-12-05T09:24:01Z","updated_at":"2020-12-14T19:44:48Z","closed_at":"2020-12-14T19:44:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1152","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1152","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1152.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1152.patch","merged_at":"2020-12-14T19:44:48Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1152\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1152\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1151","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1151\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1151\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1151\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1151","id":757517092,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyODc5ODk4","number":1151,"title":"adding psc 
dataset","user":{"login":"abecadel","id":1654113,"node_id":"MDQ6VXNlcjE2NTQxMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1654113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abecadel","html_url":"https:\/\/github.com\/abecadel","followers_url":"https:\/\/api.github.com\/users\/abecadel\/followers","following_url":"https:\/\/api.github.com\/users\/abecadel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abecadel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abecadel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abecadel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abecadel\/orgs","repos_url":"https:\/\/api.github.com\/users\/abecadel\/repos","events_url":"https:\/\/api.github.com\/users\/abecadel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abecadel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T02:40:01Z","updated_at":"2020-12-09T11:38:41Z","closed_at":"2020-12-09T11:38:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1151","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1151","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1151.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1151.patch","merged_at":"2020-12-09T11:38:41Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1151\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1151\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1150","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1150\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1150\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1150\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1150","id":757512441,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyODc2MzEz","number":1150,"title":"adding dyk 
dataset","user":{"login":"abecadel","id":1654113,"node_id":"MDQ6VXNlcjE2NTQxMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1654113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abecadel","html_url":"https:\/\/github.com\/abecadel","followers_url":"https:\/\/api.github.com\/users\/abecadel\/followers","following_url":"https:\/\/api.github.com\/users\/abecadel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abecadel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abecadel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abecadel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abecadel\/orgs","repos_url":"https:\/\/api.github.com\/users\/abecadel\/repos","events_url":"https:\/\/api.github.com\/users\/abecadel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abecadel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T02:11:42Z","updated_at":"2020-12-05T16:52:19Z","closed_at":"2020-12-05T16:52:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1150","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1150","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1150.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1150.patch","merged_at":"2020-12-05T16:52:19Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1150\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1150\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1149","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1149\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1149\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1149\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1149","id":757504068,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyODY5ODUz","number":1149,"title":"Fix typo in the comment in _info 
function","user":{"login":"vinaykudari","id":34424769,"node_id":"MDQ6VXNlcjM0NDI0NzY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34424769?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vinaykudari","html_url":"https:\/\/github.com\/vinaykudari","followers_url":"https:\/\/api.github.com\/users\/vinaykudari\/followers","following_url":"https:\/\/api.github.com\/users\/vinaykudari\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vinaykudari\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vinaykudari\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vinaykudari\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vinaykudari\/orgs","repos_url":"https:\/\/api.github.com\/users\/vinaykudari\/repos","events_url":"https:\/\/api.github.com\/users\/vinaykudari\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vinaykudari\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T01:26:20Z","updated_at":"2020-12-05T16:19:26Z","closed_at":"2020-12-05T16:19:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1149","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1149","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1149.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1149.patch","merged_at":"2020-12-05T16:19:26Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1149\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1149\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1148","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1148\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1148\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1148\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1148","id":757503918,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyODY5NzM0","number":1148,"title":"adding polemo2 
dataset","user":{"login":"abecadel","id":1654113,"node_id":"MDQ6VXNlcjE2NTQxMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1654113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abecadel","html_url":"https:\/\/github.com\/abecadel","followers_url":"https:\/\/api.github.com\/users\/abecadel\/followers","following_url":"https:\/\/api.github.com\/users\/abecadel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abecadel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abecadel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abecadel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abecadel\/orgs","repos_url":"https:\/\/api.github.com\/users\/abecadel\/repos","events_url":"https:\/\/api.github.com\/users\/abecadel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abecadel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T01:25:29Z","updated_at":"2020-12-05T16:51:39Z","closed_at":"2020-12-05T16:51:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1148","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1148","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1148.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1148.patch","merged_at":"2020-12-05T16:51:38Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1148\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1148\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1147","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1147\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1147\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1147\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1147","id":757502199,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyODY4MzU2","number":1147,"title":"Vinay\/add\/telugu 
books","user":{"login":"vinaykudari","id":34424769,"node_id":"MDQ6VXNlcjM0NDI0NzY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34424769?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vinaykudari","html_url":"https:\/\/github.com\/vinaykudari","followers_url":"https:\/\/api.github.com\/users\/vinaykudari\/followers","following_url":"https:\/\/api.github.com\/users\/vinaykudari\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vinaykudari\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vinaykudari\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vinaykudari\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vinaykudari\/orgs","repos_url":"https:\/\/api.github.com\/users\/vinaykudari\/repos","events_url":"https:\/\/api.github.com\/users\/vinaykudari\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vinaykudari\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T01:17:02Z","updated_at":"2020-12-05T16:36:04Z","closed_at":"2020-12-05T16:36:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1147","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1147","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1147.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1147.patch","merged_at":"2020-12-05T16:36:03Z"},"body":"Real data tests are failing as this dataset needs to be manually downloaded","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1147\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1147\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1146","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1146\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1146\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1146\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1146","id":757498565,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyODY1NTAy","number":1146,"title":"Add 
LINNAEUS","user":{"login":"edugp","id":17855740,"node_id":"MDQ6VXNlcjE3ODU1NzQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17855740?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/edugp","html_url":"https:\/\/github.com\/edugp","followers_url":"https:\/\/api.github.com\/users\/edugp\/followers","following_url":"https:\/\/api.github.com\/users\/edugp\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/edugp\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/edugp\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/edugp\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/edugp\/orgs","repos_url":"https:\/\/api.github.com\/users\/edugp\/repos","events_url":"https:\/\/api.github.com\/users\/edugp\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/edugp\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-05T01:01:09Z","updated_at":"2020-12-05T16:35:53Z","closed_at":"2020-12-05T16:35:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1146","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1146","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1146.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1146.patch","merged_at":"2020-12-05T16:35:53Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1146\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1146\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1145","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1145\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1145\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1145\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1145","id":757477349,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyODQ4MTQx","number":1145,"title":"Add 
Species-800","user":{"login":"edugp","id":17855740,"node_id":"MDQ6VXNlcjE3ODU1NzQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17855740?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/edugp","html_url":"https:\/\/github.com\/edugp","followers_url":"https:\/\/api.github.com\/users\/edugp\/followers","following_url":"https:\/\/api.github.com\/users\/edugp\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/edugp\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/edugp\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/edugp\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/edugp\/orgs","repos_url":"https:\/\/api.github.com\/users\/edugp\/repos","events_url":"https:\/\/api.github.com\/users\/edugp\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/edugp\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-04T23:44:51Z","updated_at":"2022-01-13T03:09:20Z","closed_at":"2020-12-05T16:35:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1145","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1145","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1145.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1145.patch","merged_at":"2020-12-05T16:35:01Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1145\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1145\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1144","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1144\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1144\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1144\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1144","id":757452831,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyODI3OTI4","number":1144,"title":"Add 
JFLEG","user":{"login":"j-chim","id":22435209,"node_id":"MDQ6VXNlcjIyNDM1MjA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22435209?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/j-chim","html_url":"https:\/\/github.com\/j-chim","followers_url":"https:\/\/api.github.com\/users\/j-chim\/followers","following_url":"https:\/\/api.github.com\/users\/j-chim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/j-chim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/j-chim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/j-chim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/j-chim\/orgs","repos_url":"https:\/\/api.github.com\/users\/j-chim\/repos","events_url":"https:\/\/api.github.com\/users\/j-chim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/j-chim\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-04T22:36:38Z","updated_at":"2020-12-06T18:16:04Z","closed_at":"2020-12-06T18:16:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1144","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1144","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1144.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1144.patch","merged_at":"2020-12-06T18:16:04Z"},"body":"This PR adds [JFLEG ](https:\/\/www.aclweb.org\/anthology\/E17-2037\/), an English grammatical error correction benchmark. \r\n\r\nThe tests were successful on real data, although it would be great if I can get some guidance on the **dummy data**. Basically, **for each source sentence there are 4 possible gold standard target sentences**. The original dataset comprise files in a flat structure, labelled by split then by source\/target (e.g., dev.src, dev.ref0, ..., dev.ref3). Not sure what is the best way of adding this.\r\n\r\nI imagine I can treat each distinct source-target pair as its own split? But having so many copies of the source sentence feels redundant, and it would make it less convenient to end-users who might want to access multiple gold standard targets simultaneously. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1144\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1144\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1143","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1143\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1143\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1143\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1143","id":757448920,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyODI0NzMx","number":1143,"title":"Add the Winograd Schema Challenge","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T22:26:59Z","updated_at":"2020-12-09T15:11:31Z","closed_at":"2020-12-09T09:32:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1143","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1143","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1143.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1143.patch","merged_at":"2020-12-09T09:32:34Z"},"body":"Adds the Winograd Schema Challenge, including configs for the more canonical wsc273 as well as wsc285 with 12 new examples.\r\n\r\n- https:\/\/cs.nyu.edu\/faculty\/davise\/papers\/WinogradSchemas\/WS.html\r\n\r\nThe data format was a bit of a nightmare but I think I got it to a workable format.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1143\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1143\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1142","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1142\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1142\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1142\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1142","id":757413920,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNzk1MjY0","number":1142,"title":"Fix PerSenT","user":{"login":"jeromeku","id":2455711,"node_id":"MDQ6VXNlcjI0NTU3MTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2455711?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jeromeku","html_url":"https:\/\/github.com\/jeromeku","followers_url":"https:\/\/api.github.com\/users\/jeromeku\/followers","following_url":"https:\/\/api.github.com\/users\/jeromeku\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jeromeku\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jeromeku\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jeromeku\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jeromeku\/orgs","repos_url":"https:\/\/api.github.com\/users\/jeromeku\/repos","events_url":"https:\/\/api.github.com\/users\/jeromeku\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jeromeku\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T21:21:02Z","updated_at":"2020-12-14T13:39:34Z","closed_at":"2020-12-14T13:39:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1142","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1142","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1142.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1142.patch","merged_at":"2020-12-14T13:39:34Z"},"body":"New PR for dataset PerSenT","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1142\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1142\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1141","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1141\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1141\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1141\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1141","id":757411057,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNzkyNzU3","number":1141,"title":"Add GitHub version of ETH Py150 
Corpus","user":{"login":"Bharat123rox","id":13381361,"node_id":"MDQ6VXNlcjEzMzgxMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13381361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Bharat123rox","html_url":"https:\/\/github.com\/Bharat123rox","followers_url":"https:\/\/api.github.com\/users\/Bharat123rox\/followers","following_url":"https:\/\/api.github.com\/users\/Bharat123rox\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Bharat123rox\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Bharat123rox\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Bharat123rox\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Bharat123rox\/orgs","repos_url":"https:\/\/api.github.com\/users\/Bharat123rox\/repos","events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-04T21:16:08Z","updated_at":"2020-12-09T18:32:44Z","closed_at":"2020-12-07T10:00:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1141","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1141","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1141.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1141.patch","merged_at":"2020-12-07T10:00:24Z"},"body":"Add the redistributable version of **ETH Py150 Corpus**","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1141\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1141\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1140","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1140\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1140\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1140\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1140","id":757399142,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNzgyODc0","number":1140,"title":"Add Urdu Sentiment Corpus (USC). 
","user":{"login":"chaitnayabasava","id":44389205,"node_id":"MDQ6VXNlcjQ0Mzg5MjA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44389205?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chaitnayabasava","html_url":"https:\/\/github.com\/chaitnayabasava","followers_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/followers","following_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/orgs","repos_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/repos","events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-04T20:55:27Z","updated_at":"2020-12-07T03:27:23Z","closed_at":"2020-12-07T03:27:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1140","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1140","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1140.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1140.patch","merged_at":null},"body":"Added Urdu Sentiment Corpus. More details about the dataset over here<\/a>. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1140\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1140\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1139","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1139\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1139\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1139\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1139","id":757393158,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNzc3OTg2","number":1139,"title":"Add ReFreSD 
dataset","user":{"login":"mpariente","id":18496796,"node_id":"MDQ6VXNlcjE4NDk2Nzk2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18496796?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mpariente","html_url":"https:\/\/github.com\/mpariente","followers_url":"https:\/\/api.github.com\/users\/mpariente\/followers","following_url":"https:\/\/api.github.com\/users\/mpariente\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mpariente\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mpariente\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mpariente\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mpariente\/orgs","repos_url":"https:\/\/api.github.com\/users\/mpariente\/repos","events_url":"https:\/\/api.github.com\/users\/mpariente\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mpariente\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-04T20:45:11Z","updated_at":"2020-12-16T16:01:18Z","closed_at":"2020-12-16T16:01:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1139","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1139","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1139.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1139.patch","merged_at":"2020-12-16T16:01:18Z"},"body":"This PR adds the **ReFreSD dataset**. \r\nThe original data is hosted [on this github repo](https:\/\/github.com\/Elbria\/xling-SemDiv) and we use the `REFreSD_rationale` to expose all the data. \r\n\r\n\r\nNeed feedback on:\r\n- I couldn't generate the dummy data. The file we download is a tsv file, but without extension, I suppose this is the problem. I'm sure there is a simple trick to make this work. \r\n- The feature names. \r\n - I don't know if it's better to stick to the classic `sentence1`, `sentence2` or to `sentence_en`, `sentence_fr` to be more explicit. \r\n - There is a binary label (called `label`, no problem here), and a 3-class label called `#3_labels` in the original tsv. I changed it to `all_labels` but I'm sure there is better. \r\n- The rationales are lists of integers, extracted as a string at first. I wonder what's the best way to treat them, any idea? Also, I couldn't manage to make a `Sequence` of `int8` but I'm sure I've missed something simple. 
\r\n\r\nThanks in advance ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1139\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1139\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1138","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1138\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1138\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1138\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1138","id":757378406,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNzY1NTI2","number":1138,"title":"updated after the class name update","user":{"login":"timpal0l","id":6556710,"node_id":"MDQ6VXNlcjY1NTY3MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6556710?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timpal0l","html_url":"https:\/\/github.com\/timpal0l","followers_url":"https:\/\/api.github.com\/users\/timpal0l\/followers","following_url":"https:\/\/api.github.com\/users\/timpal0l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timpal0l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timpal0l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timpal0l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timpal0l\/orgs","repos_url":"https:\/\/api.github.com\/users\/timpal0l\/repos","events_url":"https:\/\/api.github.com\/users\/timpal0l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timpal0l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T20:19:43Z","updated_at":"2020-12-05T15:43:32Z","closed_at":"2020-12-05T15:43:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1138","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1138","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1138.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1138.patch","merged_at":"2020-12-05T15:43:32Z"},"body":"@lhoestq <--- ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1138\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1138\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1137","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1137\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1137\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1137\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1137","id":757358145,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNzQ4NDAx","number":1137,"title":"add wmt mlqe 2020 
shared task","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T19:45:34Z","updated_at":"2020-12-06T19:59:44Z","closed_at":"2020-12-06T19:53:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1137","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1137","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1137.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1137.patch","merged_at":null},"body":"First commit for Shared task 1 (wmt_mlqw_task1) of WMT20 MLQE (quality estimation of machine translation)\r\nNote that I copied the tags in the README for only one (of the 7 configurations): `en-de`.\r\nThere is one configuration for each pair of languages.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1137\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1137\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1136","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1136\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1136\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1136\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1136","id":757341607,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNzM0MzQ4","number":1136,"title":"minor change in description in paws-x.py and updated 
dataset_infos","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T19:17:49Z","updated_at":"2020-12-06T18:02:57Z","closed_at":"2020-12-06T18:02:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1136","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1136","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1136.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1136.patch","merged_at":"2020-12-06T18:02:57Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1136\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1136\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1135","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1135\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1135\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1135\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1135","id":757325741,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNzIxMDIz","number":1135,"title":"added 
paws","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T18:52:38Z","updated_at":"2020-12-09T17:17:13Z","closed_at":"2020-12-09T17:17:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1135","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1135","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1135.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1135.patch","merged_at":"2020-12-09T17:17:13Z"},"body":"Updating README and tags for dataset card in a while","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1135\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1135\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1134","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1134\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1134\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1134\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1134","id":757317651,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNzE0MjQ2","number":1134,"title":"adding xquad-r 
dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T18:39:13Z","updated_at":"2020-12-05T16:50:47Z","closed_at":"2020-12-05T16:50:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1134","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1134","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1134.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1134.patch","merged_at":"2020-12-05T16:50:47Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1134\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1134\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1133","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1133\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1133\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1133\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1133","id":757307660,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNzA1ODQ4","number":1133,"title":"Adding XQUAD-R 
Dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T18:22:29Z","updated_at":"2020-12-04T18:28:54Z","closed_at":"2020-12-04T18:28:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1133","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1133","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1133.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1133.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1133\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1133\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1132","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1132\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1132\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1132\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1132","id":757301368,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNzAwNTY5","number":1132,"title":"Add Urdu Sentiment Corpus 
(USC).","user":{"login":"chaitnayabasava","id":44389205,"node_id":"MDQ6VXNlcjQ0Mzg5MjA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44389205?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chaitnayabasava","html_url":"https:\/\/github.com\/chaitnayabasava","followers_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/followers","following_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/orgs","repos_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/repos","events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T18:12:24Z","updated_at":"2020-12-04T20:52:48Z","closed_at":"2020-12-04T20:52:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1132","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1132","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1132.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1132.patch","merged_at":null},"body":"Added Urdu Sentiment Corpus. More details about the dataset over here<\/a>.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1132\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1132\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1131","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1131\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1131\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1131\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1131","id":757278341,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNjgxMTI0","number":1131,"title":"Adding XQUAD-R 
Dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T17:35:43Z","updated_at":"2020-12-04T18:27:22Z","closed_at":"2020-12-04T18:27:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1131","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1131","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1131.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1131.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1131\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1131\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1130","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1130\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1130\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1130\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1130","id":757265075,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNjY5ODY0","number":1130,"title":"adding 
discovery","user":{"login":"sileod","id":9168444,"node_id":"MDQ6VXNlcjkxNjg0NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9168444?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sileod","html_url":"https:\/\/github.com\/sileod","followers_url":"https:\/\/api.github.com\/users\/sileod\/followers","following_url":"https:\/\/api.github.com\/users\/sileod\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sileod\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sileod\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sileod\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sileod\/orgs","repos_url":"https:\/\/api.github.com\/users\/sileod\/repos","events_url":"https:\/\/api.github.com\/users\/sileod\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sileod\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T17:16:54Z","updated_at":"2020-12-14T13:03:14Z","closed_at":"2020-12-14T13:03:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1130","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1130","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1130.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1130.patch","merged_at":"2020-12-14T13:03:14Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1130\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1130\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1129","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1129\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1129\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1129\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1129","id":757255492,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNjYxNzM2","number":1129,"title":"Adding initial version of cord-19 
dataset","user":{"login":"ggdupont","id":5583410,"node_id":"MDQ6VXNlcjU1ODM0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5583410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ggdupont","html_url":"https:\/\/github.com\/ggdupont","followers_url":"https:\/\/api.github.com\/users\/ggdupont\/followers","following_url":"https:\/\/api.github.com\/users\/ggdupont\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ggdupont\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ggdupont\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ggdupont\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ggdupont\/orgs","repos_url":"https:\/\/api.github.com\/users\/ggdupont\/repos","events_url":"https:\/\/api.github.com\/users\/ggdupont\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ggdupont\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-04T17:03:17Z","updated_at":"2021-02-09T10:22:35Z","closed_at":"2021-02-09T10:18:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1129","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1129","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1129.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1129.patch","merged_at":null},"body":"Initial version only reading the metadata in CSV.\r\n\r\n### Checklist:\r\n- [x] Create the dataset script \/datasets\/my_dataset\/my_dataset.py using the template\r\n- [x] Fill the _DESCRIPTION and _CITATION variables\r\n- [x] Implement _infos(), _split_generators() and _generate_examples()\r\n- [x] Make sure that the BUILDER_CONFIGS class attribute is filled with the different configurations of the dataset and that the BUILDER_CONFIG_CLASS is specified if there is a custom config class.\r\n- [x] Generate the metadata file dataset_infos.json for all configurations\r\n- [x] Generate the dummy data dummy_data.zip files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [x] Add the dataset card README.md using the template and at least fill the tags\r\n- [x] Both tests for the real data and the dummy data pass.\r\n\r\n### TODO:\r\n- [x] add more metadata\r\n- [x] add full text\r\n- [x] add pre-computed document embedding","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1129\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1129\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1128","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1128\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1128\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1128\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1128","id":757245404,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNjUzMzgy","number":1128,"title":"Add xquad-r 
dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T16:48:53Z","updated_at":"2020-12-04T18:14:30Z","closed_at":"2020-12-04T18:14:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1128","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1128","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1128.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1128.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1128\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1128\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1127","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1127\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1127\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1127\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1127","id":757229684,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNjQwMjMx","number":1127,"title":"Add wikiqaar 
dataset","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T16:26:18Z","updated_at":"2020-12-07T16:39:41Z","closed_at":"2020-12-07T16:39:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1127","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1127","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1127.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1127.patch","merged_at":"2020-12-07T16:39:41Z"},"body":"Arabic Wiki Question Answering Corpus.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1127\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1127\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1126","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1126\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1126\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1126\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1126","id":757197735,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNjEzNzcw","number":1126,"title":"Adding babi 
dataset","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-04T15:42:34Z","updated_at":"2021-03-30T09:44:04Z","closed_at":"2021-03-30T09:44:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1126","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1126","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1126.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1126.patch","merged_at":null},"body":"Adding the English version of bAbI.\r\n\r\nSamples are taken from ParlAI for consistency with the main users at the moment.\r\n\r\nSupersede #945 (problem with the rebase) and adresses the issues mentioned in the review (dummy data are smaller now and code comments are fixed).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1126\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1126\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1125","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1125\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1125\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1125\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1125","id":757194531,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNjExMDU5","number":1125,"title":"Add Urdu fake news 
dataset.","user":{"login":"chaitnayabasava","id":44389205,"node_id":"MDQ6VXNlcjQ0Mzg5MjA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44389205?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chaitnayabasava","html_url":"https:\/\/github.com\/chaitnayabasava","followers_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/followers","following_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/orgs","repos_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/repos","events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-04T15:38:17Z","updated_at":"2020-12-07T03:21:05Z","closed_at":"2020-12-07T03:21:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1125","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1125","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1125.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1125.patch","merged_at":null},"body":"Added Urdu fake news dataset. More information about the dataset can be found here<\/a>.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1125\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1125\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1124","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1124\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1124\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1124\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1124","id":757186983,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNjA0NzY3","number":1124,"title":"Add Xitsonga 
Ner","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T15:27:44Z","updated_at":"2020-12-06T18:31:35Z","closed_at":"2020-12-06T18:31:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1124","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1124","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1124.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1124.patch","merged_at":null},"body":"Clean Xitsonga Ner PR","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1124\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1124\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1123","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1123\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1123\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1123\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1123","id":757181014,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTk5ODQ3","number":1123,"title":"adding cdt 
dataset","user":{"login":"abecadel","id":1654113,"node_id":"MDQ6VXNlcjE2NTQxMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1654113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abecadel","html_url":"https:\/\/github.com\/abecadel","followers_url":"https:\/\/api.github.com\/users\/abecadel\/followers","following_url":"https:\/\/api.github.com\/users\/abecadel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abecadel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abecadel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abecadel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abecadel\/orgs","repos_url":"https:\/\/api.github.com\/users\/abecadel\/repos","events_url":"https:\/\/api.github.com\/users\/abecadel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abecadel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-04T15:19:36Z","updated_at":"2020-12-04T17:05:56Z","closed_at":"2020-12-04T17:05:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1123","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1123","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1123.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1123.patch","merged_at":"2020-12-04T17:05:56Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1123\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1123\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1122","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1122\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1122\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1122\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1122","id":757176172,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTk1ODE5","number":1122,"title":"Add Urdu fake 
news.","user":{"login":"chaitnayabasava","id":44389205,"node_id":"MDQ6VXNlcjQ0Mzg5MjA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44389205?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chaitnayabasava","html_url":"https:\/\/github.com\/chaitnayabasava","followers_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/followers","following_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/orgs","repos_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/repos","events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T15:13:10Z","updated_at":"2020-12-04T15:20:07Z","closed_at":"2020-12-04T15:20:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1122","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1122","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1122.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1122.patch","merged_at":null},"body":"Added Urdu fake news dataset. More information about the dataset can be found here<\/a>.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1122\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1122\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1121","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1121\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1121\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1121\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1121","id":757169944,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTkwNjY2","number":1121,"title":"adding cdt 
dataset","user":{"login":"abecadel","id":1654113,"node_id":"MDQ6VXNlcjE2NTQxMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1654113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abecadel","html_url":"https:\/\/github.com\/abecadel","followers_url":"https:\/\/api.github.com\/users\/abecadel\/followers","following_url":"https:\/\/api.github.com\/users\/abecadel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abecadel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abecadel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abecadel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abecadel\/orgs","repos_url":"https:\/\/api.github.com\/users\/abecadel\/repos","events_url":"https:\/\/api.github.com\/users\/abecadel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abecadel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T15:04:33Z","updated_at":"2020-12-04T15:16:49Z","closed_at":"2020-12-04T15:16:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1121","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1121","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1121.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1121.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1121\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1121\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1120","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1120\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1120\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1120\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1120","id":757166342,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTg3Njk1","number":1120,"title":"Add conda environment 
activation","user":{"login":"parmarsuraj99","id":9317265,"node_id":"MDQ6VXNlcjkzMTcyNjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9317265?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/parmarsuraj99","html_url":"https:\/\/github.com\/parmarsuraj99","followers_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/followers","following_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/orgs","repos_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/repos","events_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/parmarsuraj99\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T14:59:43Z","updated_at":"2020-12-04T18:34:48Z","closed_at":"2020-12-04T16:40:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1120","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1120","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1120.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1120.patch","merged_at":"2020-12-04T16:40:57Z"},"body":"Added activation of Conda environment before installing.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1120\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1120\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1119","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1119\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1119\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1119\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1119","id":757156781,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTc5ODA5","number":1119,"title":"Add Google Great Code 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T14:46:28Z","updated_at":"2020-12-06T17:33:14Z","closed_at":"2020-12-06T17:33:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1119","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1119","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1119.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1119.patch","merged_at":"2020-12-06T17:33:13Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1119\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1119\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1118","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1118\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1118\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1118\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1118","id":757142350,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTY3ODMw","number":1118,"title":"Add Tashkeela 
dataset","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-04T14:26:18Z","updated_at":"2020-12-04T15:47:01Z","closed_at":"2020-12-04T15:46:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1118","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1118","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1118.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1118.patch","merged_at":"2020-12-04T15:46:50Z"},"body":"Arabic Vocalized Words Dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1118\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1118\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1117","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1117\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1117\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1117\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1117","id":757133789,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTYwNzM4","number":1117,"title":"Fix incorrect MRQA train+SQuAD 
URL","user":{"login":"jimmycode","id":6259768,"node_id":"MDQ6VXNlcjYyNTk3Njg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6259768?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jimmycode","html_url":"https:\/\/github.com\/jimmycode","followers_url":"https:\/\/api.github.com\/users\/jimmycode\/followers","following_url":"https:\/\/api.github.com\/users\/jimmycode\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jimmycode\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jimmycode\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jimmycode\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jimmycode\/orgs","repos_url":"https:\/\/api.github.com\/users\/jimmycode\/repos","events_url":"https:\/\/api.github.com\/users\/jimmycode\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jimmycode\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-04T14:14:26Z","updated_at":"2020-12-06T17:14:11Z","closed_at":"2020-12-06T17:14:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1117","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1117","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1117.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1117.patch","merged_at":"2020-12-06T17:14:10Z"},"body":"Fix issue #1115 \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1117\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1117\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1116","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1116\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1116\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1116\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1116","id":757133502,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTYwNDk4","number":1116,"title":"add dbpedia_14 
dataset","user":{"login":"hfawaz","id":29229602,"node_id":"MDQ6VXNlcjI5MjI5NjAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29229602?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hfawaz","html_url":"https:\/\/github.com\/hfawaz","followers_url":"https:\/\/api.github.com\/users\/hfawaz\/followers","following_url":"https:\/\/api.github.com\/users\/hfawaz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hfawaz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hfawaz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hfawaz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hfawaz\/orgs","repos_url":"https:\/\/api.github.com\/users\/hfawaz\/repos","events_url":"https:\/\/api.github.com\/users\/hfawaz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hfawaz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-04T14:13:59Z","updated_at":"2020-12-07T10:06:54Z","closed_at":"2020-12-05T15:36:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1116","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1116","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1116.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1116.patch","merged_at":"2020-12-05T15:36:23Z"},"body":"This dataset corresponds to the DBpedia dataset requested in https:\/\/github.com\/huggingface\/datasets\/issues\/353.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1116\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1116\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1115","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1115\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1115\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1115\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1115","id":757127527,"node_id":"MDU6SXNzdWU3NTcxMjc1Mjc=","number":1115,"title":"Incorrect URL for MRQA SQuAD train 
subset","user":{"login":"jimmycode","id":6259768,"node_id":"MDQ6VXNlcjYyNTk3Njg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6259768?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jimmycode","html_url":"https:\/\/github.com\/jimmycode","followers_url":"https:\/\/api.github.com\/users\/jimmycode\/followers","following_url":"https:\/\/api.github.com\/users\/jimmycode\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jimmycode\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jimmycode\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jimmycode\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jimmycode\/orgs","repos_url":"https:\/\/api.github.com\/users\/jimmycode\/repos","events_url":"https:\/\/api.github.com\/users\/jimmycode\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jimmycode\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T14:05:24Z","updated_at":"2020-12-06T17:14:22Z","closed_at":"2020-12-06T17:14:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"https:\/\/github.com\/huggingface\/datasets\/blob\/4ef4c8f8b7a60e35c6fa21115fca9faae91c9f74\/datasets\/mrqa\/mrqa.py#L53\r\n\r\nThe URL for `train+SQuAD` subset of MRQA points to the dev set instead of train set. It should be `https:\/\/s3.us-east-2.amazonaws.com\/mrqa\/release\/v2\/train\/SQuAD.jsonl.gz`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1115\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1115\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1114","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1114\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1114\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1114\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1114","id":757123638,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTUyMjE1","number":1114,"title":"Add sesotho ner 
corpus","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T13:59:41Z","updated_at":"2020-12-04T15:02:07Z","closed_at":"2020-12-04T15:02:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1114","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1114","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1114.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1114.patch","merged_at":"2020-12-04T15:02:07Z"},"body":"Clean Sesotho PR","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1114\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1114\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1113","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1113\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1113\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1113\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1113","id":757115557,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTQ1Mzg2","number":1113,"title":"add 
qed","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T13:47:57Z","updated_at":"2020-12-05T15:46:21Z","closed_at":"2020-12-05T15:41:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1113","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1113","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1113.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1113.patch","merged_at":"2020-12-05T15:41:57Z"},"body":"adding QED: Dataset for Explanations in Question Answering\r\nhttps:\/\/github.com\/google-research-datasets\/QED\r\nhttps:\/\/arxiv.org\/abs\/2009.06354","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1113\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1113\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1112","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1112\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1112\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1112\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1112","id":757108151,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTM5MjE2","number":1112,"title":"Initial version of cord-19 dataset from AllenAI with only the 
abstract","user":{"login":"ggdupont","id":5583410,"node_id":"MDQ6VXNlcjU1ODM0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5583410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ggdupont","html_url":"https:\/\/github.com\/ggdupont","followers_url":"https:\/\/api.github.com\/users\/ggdupont\/followers","following_url":"https:\/\/api.github.com\/users\/ggdupont\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ggdupont\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ggdupont\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ggdupont\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ggdupont\/orgs","repos_url":"https:\/\/api.github.com\/users\/ggdupont\/repos","events_url":"https:\/\/api.github.com\/users\/ggdupont\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ggdupont\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T13:36:39Z","updated_at":"2020-12-04T16:16:40Z","closed_at":"2020-12-04T16:16:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1112","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1112","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1112.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1112.patch","merged_at":null},"body":"Initial version only reading the metadata in CSV.\r\n\r\n### Checklist:\r\n- [x] Create the dataset script \/datasets\/my_dataset\/my_dataset.py using the template\r\n- [x] Fill the _DESCRIPTION and _CITATION variables\r\n- [x] Implement _infos(), _split_generators() and _generate_examples()\r\n- [x] Make sure that the BUILDER_CONFIGS class attribute is filled with the different configurations of the dataset and that the BUILDER_CONFIG_CLASS is specified if there is a custom config class.\r\n- [x] Generate the metadata file dataset_infos.json for all configurations\r\n- [x] Generate the dummy data dummy_data.zip files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [x] Add the dataset card README.md using the template and at least fill the tags\r\n- [ ] Both tests for the real data and the dummy data pass.\r\n\r\n### TODO:\r\n- [ ] add more metadata\r\n- [ ] add full text\r\n- [ ] add pre-computed document embedding","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1112\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1112\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1111","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1111\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1111\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1111\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1111","id":757083266,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNTE4NDY1","number":1111,"title":"Add Siswati Ner 
corpus","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T12:57:31Z","updated_at":"2020-12-04T14:43:01Z","closed_at":"2020-12-04T14:43:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1111","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1111","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1111.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1111.patch","merged_at":"2020-12-04T14:43:00Z"},"body":"Clean Siswati PR","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1111\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1111\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1110","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1110\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1110\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1110\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1110","id":757082677,"node_id":"MDU6SXNzdWU3NTcwODI2Nzc=","number":1110,"title":"Using a feature named \"_type\" fails with certain 
operations","user":{"login":"dcfidalgo","id":15979778,"node_id":"MDQ6VXNlcjE1OTc5Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15979778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dcfidalgo","html_url":"https:\/\/github.com\/dcfidalgo","followers_url":"https:\/\/api.github.com\/users\/dcfidalgo\/followers","following_url":"https:\/\/api.github.com\/users\/dcfidalgo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dcfidalgo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dcfidalgo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dcfidalgo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dcfidalgo\/orgs","repos_url":"https:\/\/api.github.com\/users\/dcfidalgo\/repos","events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T12:56:33Z","updated_at":"2022-01-14T18:07:00Z","closed_at":"2022-01-14T18:07:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"A column named `_type` leads to a `TypeError: unhashable type: 'dict'` for certain operations:\r\n```python\r\nfrom datasets import Dataset, concatenate_datasets\r\n\r\nds = Dataset.from_dict({\"_type\": [\"whatever\"]}).map()\r\nconcatenate_datasets([ds])\r\n# or simply\r\nDataset(ds._data)\r\n```\r\nContext: We are using datasets to persist data coming from elasticsearch to feed to our pipeline, and elasticsearch has a `_type` field, hence the strange name of the column.\r\n\r\nNot sure if you wish to support this specific column name, but if you do i would be happy to try a fix and provide a PR. I already had a look into it and i think the culprit is the `datasets.features.generate_from_dict` function. 
It uses the hard coded `_type` string to figure out if it reached the end of the nested feature object from a serialized dict.\r\n\r\nBest wishes and keep up the awesome work!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1110\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1110\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1109","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1109\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1109\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1109\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1109","id":757055702,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDk1MDk2","number":1109,"title":"add woz_dialogue","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T12:13:07Z","updated_at":"2020-12-05T15:41:23Z","closed_at":"2020-12-05T15:40:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1109","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1109","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1109.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1109.patch","merged_at":"2020-12-05T15:40:18Z"},"body":"Adding Wizard-of-Oz task oriented dialogue dataset \r\nhttps:\/\/github.com\/nmrksic\/neural-belief-tracker\/tree\/master\/data\/woz\r\nhttps:\/\/arxiv.org\/abs\/1604.04562","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1109\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1109\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1108","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1108\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1108\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1108\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1108","id":757054732,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDk0MjY4","number":1108,"title":"Add Sepedi NER corpus","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T12:11:24Z","updated_at":"2020-12-04T14:39:00Z","closed_at":"2020-12-04T14:39:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1108","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1108","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1108.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1108.patch","merged_at":"2020-12-04T14:39:00Z"},"body":"Finally a clean PR for Sepedi","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1108\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1108\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1107","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1107\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1107\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1107\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1107","id":757031179,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDc0MzMy","number":1107,"title":"Add arsentd_lev 
dataset","user":{"login":"moussaKam","id":28675016,"node_id":"MDQ6VXNlcjI4Njc1MDE2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28675016?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/moussaKam","html_url":"https:\/\/github.com\/moussaKam","followers_url":"https:\/\/api.github.com\/users\/moussaKam\/followers","following_url":"https:\/\/api.github.com\/users\/moussaKam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/moussaKam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/moussaKam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/moussaKam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/moussaKam\/orgs","repos_url":"https:\/\/api.github.com\/users\/moussaKam\/repos","events_url":"https:\/\/api.github.com\/users\/moussaKam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/moussaKam\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T11:31:04Z","updated_at":"2020-12-05T15:38:09Z","closed_at":"2020-12-05T15:38:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1107","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1107","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1107.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1107.patch","merged_at":"2020-12-05T15:38:09Z"},"body":"Add The Arabic Sentiment Twitter Dataset for Levantine dialect (ArSenTD-LEV)\r\n\r\nPaper: [ArSentD-LEV: A Multi-Topic Corpus for Target-based Sentiment Analysis in Arabic Levantine Tweets](https:\/\/arxiv.org\/abs\/1906.01830)\r\nHomepage: http:\/\/oma-project.com\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1107\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1107\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1106","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1106\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1106\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1106\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1106","id":757027158,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDcwOTM3","number":1106,"title":"Add Urdu fake 
news","user":{"login":"chaitnayabasava","id":44389205,"node_id":"MDQ6VXNlcjQ0Mzg5MjA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44389205?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chaitnayabasava","html_url":"https:\/\/github.com\/chaitnayabasava","followers_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/followers","following_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/orgs","repos_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/repos","events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T11:24:14Z","updated_at":"2020-12-04T14:21:12Z","closed_at":"2020-12-04T14:21:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1106","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1106","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1106.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1106.patch","merged_at":null},"body":"Added Urdu fake news dataset. More information about the dataset can be found here<\/a>.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1106\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1106\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1105","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1105\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1105\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1105\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1105","id":757024162,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDY4NDIw","number":1105,"title":"add xquad_r 
dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-04T11:19:35Z","updated_at":"2020-12-04T16:37:00Z","closed_at":"2020-12-04T16:37:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1105","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1105","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1105.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1105.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1105\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1105\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1104","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1104\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1104\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1104\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1104","id":757020934,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDY1NzA4","number":1104,"title":"add 
TLC","user":{"login":"chameleonTK","id":6429850,"node_id":"MDQ6VXNlcjY0Mjk4NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6429850?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chameleonTK","html_url":"https:\/\/github.com\/chameleonTK","followers_url":"https:\/\/api.github.com\/users\/chameleonTK\/followers","following_url":"https:\/\/api.github.com\/users\/chameleonTK\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chameleonTK\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chameleonTK\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chameleonTK\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chameleonTK\/orgs","repos_url":"https:\/\/api.github.com\/users\/chameleonTK\/repos","events_url":"https:\/\/api.github.com\/users\/chameleonTK\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chameleonTK\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T11:14:58Z","updated_at":"2020-12-04T14:29:23Z","closed_at":"2020-12-04T14:29:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1104","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1104","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1104.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1104.patch","merged_at":"2020-12-04T14:29:23Z"},"body":"Added TLC dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1104\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1104\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1103","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1103\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1103\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1103\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1103","id":757016820,"node_id":"MDU6SXNzdWU3NTcwMTY4MjA=","number":1103,"title":"Add support to download kaggle 
datasets","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T11:08:37Z","updated_at":"2021-07-21T17:38:13Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"We can use API key","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1103\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1103\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1102","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1102\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1102\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1102\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1102","id":757016515,"node_id":"MDU6SXNzdWU3NTcwMTY1MTU=","number":1102,"title":"Add retries to download 
manager","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"assignees":[{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2020-12-04T11:08:11Z","updated_at":"2020-12-22T15:34:06Z","closed_at":"2020-12-22T15:34:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_req
uest":null,"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1102\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1102\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1101","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1101\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1101\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1101\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1101","id":757009226,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDU2MDM4","number":1101,"title":"Add Wikicorpus dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T10:57:26Z","updated_at":"2020-12-09T18:13:10Z","closed_at":"2020-12-09T18:13:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1101","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1101","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1101.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1101.patch","merged_at":"2020-12-09T18:13:09Z"},"body":"Add dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1101\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1101\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1100","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1100\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1100\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1100\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1100","id":756998433,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDQ2ODc1","number":1100,"title":"Urdu fake news","user":{"login":"chaitnayabasava","id":44389205,"node_id":"MDQ6VXNlcjQ0Mzg5MjA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44389205?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chaitnayabasava","html_url":"https:\/\/github.com\/chaitnayabasava","followers_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/followers","following_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/orgs","repos_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/repos","events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T10:41:20Z","updated_at":"2020-12-04T11:19:00Z","closed_at":"2020-12-04T11:19:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1100","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1100","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1100.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1100.patch","merged_at":null},"body":"Added Bend the Truth urdu fake news dataset. 
More inforation here<\/a>.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1100\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1100\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1099","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1099\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1099\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1099\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1099","id":756993540,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDQyODEw","number":1099,"title":"Add tamilmixsentiment data","user":{"login":"jamespaultg","id":7421838,"node_id":"MDQ6VXNlcjc0MjE4Mzg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7421838?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jamespaultg","html_url":"https:\/\/github.com\/jamespaultg","followers_url":"https:\/\/api.github.com\/users\/jamespaultg\/followers","following_url":"https:\/\/api.github.com\/users\/jamespaultg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jamespaultg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jamespaultg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jamespaultg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jamespaultg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jamespaultg\/repos","events_url":"https:\/\/api.github.com\/users\/jamespaultg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jamespaultg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T10:34:07Z","updated_at":"2020-12-06T06:32:22Z","closed_at":"2020-12-05T16:48:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1099","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1099","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1099.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1099.patch","merged_at":"2020-12-05T16:48:33Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1099\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1099\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1098","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1098\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1098\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1098\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1098","id":756975414,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDI3OTE5","number":1098,"title":"Add ToTTo 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T10:07:25Z","updated_at":"2020-12-04T13:38:20Z","closed_at":"2020-12-04T13:38:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1098","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1098","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1098.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1098.patch","merged_at":"2020-12-04T13:38:19Z"},"body":"Adds a brand new table to text dataset: https:\/\/github.com\/google-research-datasets\/ToTTo","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1098\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1098\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1097","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1097\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1097\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1097\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1097","id":756955729,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDExNzQ4","number":1097,"title":"Add MSRA NER 
labels","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T09:38:16Z","updated_at":"2020-12-04T13:31:59Z","closed_at":"2020-12-04T13:31:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1097","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1097","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1097.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1097.patch","merged_at":"2020-12-04T13:31:58Z"},"body":"Fixes #940 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1097\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1097\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1096","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1096\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1096\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1096\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1096","id":756952461,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyNDA5MDIx","number":1096,"title":"FIX matinf link in 
ADD_NEW_DATASET.md","user":{"login":"moussaKam","id":28675016,"node_id":"MDQ6VXNlcjI4Njc1MDE2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28675016?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/moussaKam","html_url":"https:\/\/github.com\/moussaKam","followers_url":"https:\/\/api.github.com\/users\/moussaKam\/followers","following_url":"https:\/\/api.github.com\/users\/moussaKam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/moussaKam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/moussaKam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/moussaKam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/moussaKam\/orgs","repos_url":"https:\/\/api.github.com\/users\/moussaKam\/repos","events_url":"https:\/\/api.github.com\/users\/moussaKam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/moussaKam\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T09:33:25Z","updated_at":"2020-12-04T14:25:35Z","closed_at":"2020-12-04T14:25:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1096","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1096","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1096.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1096.patch","merged_at":"2020-12-04T14:25:35Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1096\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1096\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1095","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1095\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1095\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1095\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1095","id":756934964,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMzk0Nzgy","number":1095,"title":"Add TupleInf Open IE 
Dataset","user":{"login":"mattbui","id":46804938,"node_id":"MDQ6VXNlcjQ2ODA0OTM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46804938?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mattbui","html_url":"https:\/\/github.com\/mattbui","followers_url":"https:\/\/api.github.com\/users\/mattbui\/followers","following_url":"https:\/\/api.github.com\/users\/mattbui\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mattbui\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mattbui\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mattbui\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mattbui\/orgs","repos_url":"https:\/\/api.github.com\/users\/mattbui\/repos","events_url":"https:\/\/api.github.com\/users\/mattbui\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mattbui\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-04T09:08:07Z","updated_at":"2020-12-04T15:40:54Z","closed_at":"2020-12-04T15:40:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1095","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1095","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1095.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1095.patch","merged_at":"2020-12-04T15:40:54Z"},"body":"For more information: https:\/\/allenai.org\/data\/tuple-ie","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1095\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1095\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1094","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1094\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1094\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1094\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1094","id":756927060,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMzg5MDQ4","number":1094,"title":"add urdu fake news 
dataset","user":{"login":"chaitnayabasava","id":44389205,"node_id":"MDQ6VXNlcjQ0Mzg5MjA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44389205?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chaitnayabasava","html_url":"https:\/\/github.com\/chaitnayabasava","followers_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/followers","following_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/orgs","repos_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/repos","events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chaitnayabasava\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T08:57:38Z","updated_at":"2020-12-04T09:20:56Z","closed_at":"2020-12-04T09:20:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1094","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1094","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1094.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1094.patch","merged_at":null},"body":"Added Urdu fake news dataset. The dataset can be found here<\/a>.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1094\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1094\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1093","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1093\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1093\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1093\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1093","id":756916565,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMzgxNjkw","number":1093,"title":"Add NCBI Disease Corpus 
dataset","user":{"login":"edugp","id":17855740,"node_id":"MDQ6VXNlcjE3ODU1NzQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17855740?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/edugp","html_url":"https:\/\/github.com\/edugp","followers_url":"https:\/\/api.github.com\/users\/edugp\/followers","following_url":"https:\/\/api.github.com\/users\/edugp\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/edugp\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/edugp\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/edugp\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/edugp\/orgs","repos_url":"https:\/\/api.github.com\/users\/edugp\/repos","events_url":"https:\/\/api.github.com\/users\/edugp\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/edugp\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T08:42:32Z","updated_at":"2020-12-04T11:15:12Z","closed_at":"2020-12-04T11:15:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1093","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1093","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1093.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1093.patch","merged_at":"2020-12-04T11:15:12Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1093\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1093\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1092","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1092\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1092\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1092\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1092","id":756913134,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMzc5MDY0","number":1092,"title":"Add Coached Conversation Preference 
Dataset","user":{"login":"vineeths96","id":50873201,"node_id":"MDQ6VXNlcjUwODczMjAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50873201?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vineeths96","html_url":"https:\/\/github.com\/vineeths96","followers_url":"https:\/\/api.github.com\/users\/vineeths96\/followers","following_url":"https:\/\/api.github.com\/users\/vineeths96\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vineeths96\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vineeths96\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vineeths96\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vineeths96\/orgs","repos_url":"https:\/\/api.github.com\/users\/vineeths96\/repos","events_url":"https:\/\/api.github.com\/users\/vineeths96\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vineeths96\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T08:36:49Z","updated_at":"2020-12-20T13:34:00Z","closed_at":"2020-12-04T13:49:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1092","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1092","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1092.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1092.patch","merged_at":"2020-12-04T13:49:50Z"},"body":"Adding [Coached Conversation Preference Dataset](https:\/\/research.google\/tools\/datasets\/coached-conversational-preference-elicitation\/)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1092\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1092\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1091","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1091\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1091\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1091\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1091","id":756841254,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMzE5MDk5","number":1091,"title":"Add Google wellformed query 
dataset","user":{"login":"vasudevgupta7","id":53136577,"node_id":"MDQ6VXNlcjUzMTM2NTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53136577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vasudevgupta7","html_url":"https:\/\/github.com\/vasudevgupta7","followers_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/followers","following_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/orgs","repos_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/repos","events_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vasudevgupta7\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T06:25:54Z","updated_at":"2020-12-06T17:43:03Z","closed_at":"2020-12-06T17:43:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1091","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1091","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1091.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1091.patch","merged_at":"2020-12-06T17:43:02Z"},"body":"This pull request will add Google wellformed_query dataset. Link of dataset is https:\/\/github.com\/google-research-datasets\/query-wellformedness","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1091\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1091\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1090","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1090\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1090\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1090\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1090","id":756825941,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMzA1OTk1","number":1090,"title":"add 
thaisum","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T05:54:48Z","updated_at":"2020-12-04T11:16:06Z","closed_at":"2020-12-04T11:16:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1090","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1090","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1090.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1090.patch","merged_at":"2020-12-04T11:16:06Z"},"body":"ThaiSum, a large-scale corpus for Thai text summarization obtained from several online news websites namely Thairath, ThaiPBS, Prachathai, and The Standard. This dataset consists of over 350,000 article and summary pairs written by journalists. 
We evaluate the performance of various existing summarization models on ThaiSum dataset and analyse the characteristic of the dataset to present its difficulties.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1090\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1090\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1089","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1089\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1089\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1089\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1089","id":756823690,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMzA0MDM2","number":1089,"title":"add sharc_modified","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T05:49:49Z","updated_at":"2020-12-04T10:41:30Z","closed_at":"2020-12-04T10:31:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1089","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1089","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1089.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1089.patch","merged_at":"2020-12-04T10:31:44Z"},"body":"Adding modified ShARC dataset https:\/\/github.com\/nikhilweee\/neural-conv-qa","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1089\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1089\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1088","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1088\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1088\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1088\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1088","id":756822017,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMzAyNjIz","number":1088,"title":"add xquad_r dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T05:45:55Z","updated_at":"2020-12-04T10:58:13Z","closed_at":"2020-12-04T10:47:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1088","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1088","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1088.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1088.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1088\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1088\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1087","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1087\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1087\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1087\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1087","id":756794430,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMjc5NDI3","number":1087,"title":"Add Big Patent 
dataset","user":{"login":"mattbui","id":46804938,"node_id":"MDQ6VXNlcjQ2ODA0OTM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46804938?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mattbui","html_url":"https:\/\/github.com\/mattbui","followers_url":"https:\/\/api.github.com\/users\/mattbui\/followers","following_url":"https:\/\/api.github.com\/users\/mattbui\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mattbui\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mattbui\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mattbui\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mattbui\/orgs","repos_url":"https:\/\/api.github.com\/users\/mattbui\/repos","events_url":"https:\/\/api.github.com\/users\/mattbui\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mattbui\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-04T04:37:30Z","updated_at":"2020-12-06T17:21:00Z","closed_at":"2020-12-06T17:20:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1087","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1087","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1087.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1087.patch","merged_at":"2020-12-06T17:20:59Z"},"body":"* More info on the dataset: https:\/\/evasharma.github.io\/bigpatent\/\r\n* There's another raw version of the dataset available from tfds. However, they're quite large so I don't have the resources to fully test all the configs for that version yet. 
We'll try to add it later.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1087\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1087\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1086","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1086\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1086\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1086\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1086","id":756720643,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMjIzNDEy","number":1086,"title":"adding cdt dataset","user":{"login":"abecadel","id":1654113,"node_id":"MDQ6VXNlcjE2NTQxMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1654113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abecadel","html_url":"https:\/\/github.com\/abecadel","followers_url":"https:\/\/api.github.com\/users\/abecadel\/followers","following_url":"https:\/\/api.github.com\/users\/abecadel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abecadel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abecadel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abecadel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abecadel\/orgs","repos_url":"https:\/\/api.github.com\/users\/abecadel\/repos","events_url":"https:\/\/api.github.com\/users\/abecadel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abecadel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-04T01:28:11Z","updated_at":"2020-12-04T15:04:02Z","closed_at":"2020-12-04T15:04:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1086","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1086","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1086.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1086.patch","merged_at":null},"body":"- **Name:** *Cyberbullying Detection Task*\r\n- **Description:** *The Cyberbullying Detection task was part of 2019 edition of PolEval competition. 
The goal is to predict if a given Twitter message contains a cyberbullying (harmful) content.*\r\n- **Data:** *https:\/\/github.com\/ptaszynski\/cyberbullying-Polish*\r\n- **Motivation:** *The KLEJ benchmark (Kompleksowa Lista Ewaluacji J\u0119zykowych) is a set of nine evaluation tasks for the Polish language understanding.*","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1086\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1086\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1085","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1085\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1085\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1085\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1085","id":756704563,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMjExNTA4","number":1085,"title":"add mutual friends conversational dataset","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T00:48:21Z","updated_at":"2020-12-16T15:58:31Z","closed_at":"2020-12-16T15:58:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1085","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1085","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1085.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1085.patch","merged_at":"2020-12-16T15:58:30Z"},"body":"Mutual friends dataset\r\nWIP\r\n\r\nTODO:\r\n- scenario_kbs (bug with pyarrow conversion)\r\n- download from codalab checksums bug","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1085\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1085\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1084","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1084\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1084\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1084\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1084","id":756688727,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMTk4MTM3","number":1084,"title":"adding cdsc dataset","user":{"login":"abecadel","id":1654113,"node_id":"MDQ6VXNlcjE2NTQxMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1654113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abecadel","html_url":"https:\/\/github.com\/abecadel","followers_url":"https:\/\/api.github.com\/users\/abecadel\/followers","following_url":"https:\/\/api.github.com\/users\/abecadel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abecadel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abecadel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abecadel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abecadel\/orgs","repos_url":"https:\/\/api.github.com\/users\/abecadel\/repos","events_url":"https:\/\/api.github.com\/users\/abecadel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abecadel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-04T00:10:05Z","updated_at":"2020-12-04T10:41:26Z","closed_at":"2020-12-04T10:41:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1084","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1084","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1084.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1084.patch","merged_at":"2020-12-04T10:41:26Z"},"body":"- **Name**: *cdsc (domains: cdsc-e & cdsc-r)*\r\n- **Description**: *Polish CDSCorpus consists of 10K Polish sentence pairs which are human-annotated for semantic relatedness and entailment. The dataset may be used for the evaluation of compositional distributional semantics models of Polish. The dataset was presented at ACL 2017. 
Please refer to the Wr\u00f3blewska and Krasnowska-Kiera\u015b (2017) for a detailed description of the resource.*\r\n- **Data**: *http:\/\/2019.poleval.pl\/index.php\/tasks\/*\r\n- **Motivation**: *The KLEJ benchmark (Kompleksowa Lista Ewaluacji J\u0119zykowych) is a set of nine evaluation tasks for the Polish language understanding.*","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1084\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1084\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1083","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1083\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1083\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1083\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1083","id":756687101,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMTk2Nzc0","number":1083,"title":"Add the multilingual Exams dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-04T00:06:04Z","updated_at":"2020-12-04T17:12:00Z","closed_at":"2020-12-04T17:12:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1083","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1083","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1083.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1083.patch","merged_at":"2020-12-04T17:12:00Z"},"body":"https:\/\/github.com\/mhardalov\/exams-qa\r\n\r\n`multilingual` configs have all languages mixed together\r\n\r\n`crosslingual` mixes the languages for test but separates them for train and dec, so I've made one config per language for train\/dev data and one config with the joint test set","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1083\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1083\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1082","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1082\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1082\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1082\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1082","id":756676218,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMTg3ODg3","number":1082,"title":"Myanmar news dataset","user":{"login":"mapmeld","id":643918,"node_id":"MDQ6VXNlcjY0MzkxOA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/643918?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mapmeld","html_url":"https:\/\/github.com\/mapmeld","followers_url":"https:\/\/api.github.com\/users\/mapmeld\/followers","following_url":"https:\/\/api.github.com\/users\/mapmeld\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mapmeld\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mapmeld\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mapmeld\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mapmeld\/orgs","repos_url":"https:\/\/api.github.com\/users\/mapmeld\/repos","events_url":"https:\/\/api.github.com\/users\/mapmeld\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mapmeld\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T23:39:00Z","updated_at":"2020-12-04T10:13:38Z","closed_at":"2020-12-04T10:13:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1082","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1082","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1082.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1082.patch","merged_at":"2020-12-04T10:13:38Z"},"body":"Add news topic classification dataset in Myanmar \/ Burmese languagess\r\n\r\nThis data was collected in 2017 by Aye Hninn Khine , and published on GitHub with a GPL license\r\nhttps:\/\/github.com\/ayehninnkhine\/MyanmarNewsClassificationSystem\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1082\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1082\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1081","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1081\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1081\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1081\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1081","id":756672527,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMTg0ODc4","number":1081,"title":"Add Knowledge-Enhanced Language Model Pre-training 
(KELM)","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T23:30:09Z","updated_at":"2020-12-04T16:36:28Z","closed_at":"2020-12-04T16:36:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1081","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1081","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1081.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1081.patch","merged_at":"2020-12-04T16:36:28Z"},"body":"Adds the KELM dataset.\r\n\r\n- Webpage\/repo: https:\/\/github.com\/google-research-datasets\/KELM-corpus\r\n- Paper: https:\/\/arxiv.org\/pdf\/2010.12688.pdf","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1081\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1081\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1080","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1080\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1080\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1080\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1080","id":756663464,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMTc3NDg5","number":1080,"title":"Add WikiANN NER 
dataset","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T23:09:24Z","updated_at":"2020-12-06T17:18:55Z","closed_at":"2020-12-06T17:18:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1080","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1080","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1080.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1080.patch","merged_at":"2020-12-06T17:18:55Z"},"body":"This PR adds the full set of 176 languages from the balanced train\/dev\/test splits of WikiANN \/ PAN-X from: https:\/\/github.com\/afshinrahimi\/mmner\r\n\r\nUntil now, only 40 of these languages were available in `datasets` as part of the XTREME benchmark\r\n\r\nCourtesy of the dataset author, we can now download this dataset from a Dropbox URL without needing a manual download anymore \ud83e\udd73, so at some point it would be worth updating the PAN-X subset of XTREME as well \ud83d\ude04 \r\n\r\nLink to gist with some snippets for producing dummy data: https:\/\/gist.github.com\/lewtun\/5b93294ab6dbcf59d1493dbe2cfd6bb9\r\n\r\nP.S. 
@yjernite I think I was confused about needing to generate a set of YAML tags per config, so ended up just adding a single one in the README.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1080\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1080\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1079","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1079\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1079\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1079\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1079","id":756652427,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMTY4Nzky","number":1079,"title":"nkjp-ner","user":{"login":"abecadel","id":1654113,"node_id":"MDQ6VXNlcjE2NTQxMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1654113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abecadel","html_url":"https:\/\/github.com\/abecadel","followers_url":"https:\/\/api.github.com\/users\/abecadel\/followers","following_url":"https:\/\/api.github.com\/users\/abecadel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abecadel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abecadel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abecadel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abecadel\/orgs","repos_url":"https:\/\/api.github.com\/users\/abecadel\/repos","events_url":"https:\/\/api.github.com\/users\/abecadel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abecadel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T22:47:26Z","updated_at":"2020-12-04T09:42:06Z","closed_at":"2020-12-04T09:42:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1079","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1079","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1079.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1079.patch","merged_at":"2020-12-04T09:42:06Z"},"body":"- **Name:** *nkjp-ner*\r\n- **Description:** *The NKJP-NER is based on a human-annotated part of NKJP. We extracted sentences with named entities of exactly one type. 
The task is to predict the type of the named entity.*\r\n- **Data:** *https:\/\/klejbenchmark.com\/tasks\/*\r\n- **Motivation:** *The KLEJ benchmark (Kompleksowa Lista Ewaluacji J\u0119zykowych) is a set of nine evaluation tasks for the Polish language understanding.*\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1079\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1079\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1078","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1078\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1078\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1078\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1078","id":756633215,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMTUyMzgx","number":1078,"title":"add AJGT dataset","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T22:16:31Z","updated_at":"2020-12-04T09:55:15Z","closed_at":"2020-12-04T09:55:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1078","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1078","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1078.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1078.patch","merged_at":"2020-12-04T09:55:15Z"},"body":"Arabic Jordanian General Tweets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1078\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1078\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1077","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1077\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1077\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1077\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1077","id":756617964,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMTM5ODMx","number":1077,"title":"Added glucose dataset","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T21:49:01Z","updated_at":"2020-12-04T09:55:53Z","closed_at":"2020-12-04T09:55:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1077","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1077","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1077.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1077.patch","merged_at":"2020-12-04T09:55:52Z"},"body":"This PR adds the [Glucose](https:\/\/github.com\/ElementalCognition\/glucose) dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1077\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1077\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1076","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1076\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1076\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1076\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1076","id":756584328,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMTExNDU5","number":1076,"title":"quac quac \/ coin 
coin","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T20:55:29Z","updated_at":"2020-12-04T16:36:39Z","closed_at":"2020-12-04T09:15:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1076","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1076","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1076.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1076.patch","merged_at":"2020-12-04T09:15:20Z"},"body":"Add QUAC (Question Answering in Context)\r\nI linearized most of the dictionnaries to lists.\r\nReferenced to the authors' datasheet for the dataset card.\r\n\ud83e\udd86\ud83e\udd86\ud83e\udd86\r\nCoin coin","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1076\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1076\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1075","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1075\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1075\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1075\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1075","id":756501235,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMDM4ODg1","number":1075,"title":"adding cleaned verion of E2E 
NLG","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T19:21:07Z","updated_at":"2020-12-03T19:43:56Z","closed_at":"2020-12-03T19:43:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1075","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1075","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1075.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1075.patch","merged_at":"2020-12-03T19:43:56Z"},"body":"Found at: https:\/\/github.com\/tuetschek\/e2e-cleaning","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1075\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1075\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1074","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1074\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1074\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1074\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1074","id":756483172,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMDIyNTIy","number":1074,"title":"Swedish MT 
STS-B","user":{"login":"timpal0l","id":6556710,"node_id":"MDQ6VXNlcjY1NTY3MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6556710?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timpal0l","html_url":"https:\/\/github.com\/timpal0l","followers_url":"https:\/\/api.github.com\/users\/timpal0l\/followers","following_url":"https:\/\/api.github.com\/users\/timpal0l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timpal0l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timpal0l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timpal0l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timpal0l\/orgs","repos_url":"https:\/\/api.github.com\/users\/timpal0l\/repos","events_url":"https:\/\/api.github.com\/users\/timpal0l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timpal0l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T19:06:25Z","updated_at":"2020-12-04T20:22:27Z","closed_at":"2020-12-03T20:44:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1074","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1074","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1074.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1074.patch","merged_at":"2020-12-03T20:44:28Z"},"body":"Added a Swedish machine translated version of the well known STS-B Corpus","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1074\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1074\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1073","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1073\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1073\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1073\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1073","id":756468034,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMyMDA4NjIw","number":1073,"title":"Add DialogRE 
dataset","user":{"login":"vineeths96","id":50873201,"node_id":"MDQ6VXNlcjUwODczMjAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50873201?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vineeths96","html_url":"https:\/\/github.com\/vineeths96","followers_url":"https:\/\/api.github.com\/users\/vineeths96\/followers","following_url":"https:\/\/api.github.com\/users\/vineeths96\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vineeths96\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vineeths96\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vineeths96\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vineeths96\/orgs","repos_url":"https:\/\/api.github.com\/users\/vineeths96\/repos","events_url":"https:\/\/api.github.com\/users\/vineeths96\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vineeths96\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T18:56:40Z","updated_at":"2020-12-20T13:34:48Z","closed_at":"2020-12-04T13:41:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1073","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1073","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1073.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1073.patch","merged_at":"2020-12-04T13:41:51Z"},"body":"Adding the [DialogRE](https:\/\/github.com\/nlpdata\/dialogre) dataset Version 2.\r\n\r\n- All tests passed successfully.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1073\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1073\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1072","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1072\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1072\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1072\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1072","id":756454511,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTk2Njky","number":1072,"title":"actually uses the previously declared VERSION on the configs in the 
template","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T18:44:27Z","updated_at":"2020-12-03T19:35:46Z","closed_at":"2020-12-03T19:35:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1072","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1072","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1072.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1072.patch","merged_at":"2020-12-03T19:35:46Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1072\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1072\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1071","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1071\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1071\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1071\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1071","id":756447296,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTkwNzY1","number":1071,"title":"add xlrd to test package 
requirements","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T18:32:47Z","updated_at":"2020-12-03T18:47:16Z","closed_at":"2020-12-03T18:47:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1071","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1071","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1071.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1071.patch","merged_at":"2020-12-03T18:47:15Z"},"body":"Adds `xlrd` package to the test requirements to handle scripts that use `pandas` to load excel files","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1071\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1071\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1070","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1070\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1070\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1070\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1070","id":756442481,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTg2Nzcz","number":1070,"title":"add 
conv_ai","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-03T18:25:20Z","updated_at":"2020-12-04T07:58:35Z","closed_at":"2020-12-04T06:44:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1070","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1070","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1070.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1070.patch","merged_at":"2020-12-04T06:44:34Z"},"body":"Adding ConvAI dataset https:\/\/github.com\/DeepPavlov\/convai\/tree\/master\/2017","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1070\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1070\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1069","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1069\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1069\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1069\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1069","id":756425737,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTcyNjYz","number":1069,"title":"Test","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T18:01:45Z","updated_at":"2020-12-04T04:24:18Z","closed_at":"2020-12-04T04:24:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1069","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1069","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1069.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1069.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1069\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1069\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1068","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1068\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1068\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1068\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1068","id":756417337,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTY1MDk0","number":1068,"title":"Add Pubmed (citation + abstract) dataset 
(2020).","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-03T17:54:10Z","updated_at":"2020-12-23T09:52:07Z","closed_at":"2020-12-23T09:52:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1068","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1068","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1068.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1068.patch","merged_at":"2020-12-23T09:52:07Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1068\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1068\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1067","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1067\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1067\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1067\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1067","id":756414212,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTYyNDYx","number":1067,"title":"add xquad-r 
dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T17:50:01Z","updated_at":"2020-12-03T17:53:21Z","closed_at":"2020-12-03T17:53:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1067","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1067","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1067.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1067.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1067\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1067\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1066","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1066\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1066\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1066\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1066","id":756391957,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTQ0MDc0","number":1066,"title":"Add 
ChrEn","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-03T17:17:48Z","updated_at":"2020-12-03T21:49:39Z","closed_at":"2020-12-03T21:49:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1066","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1066","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1066.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1066.patch","merged_at":"2020-12-03T21:49:39Z"},"body":"Adding the Cherokee English machine translation dataset of https:\/\/github.com\/ZhangShiyue\/ChrEn","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1066\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1066\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1065","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1065\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1065\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1065\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1065","id":756383414,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTM2OTQ3","number":1065,"title":"add xquad-r 
dataset","user":{"login":"manandey","id":6687858,"node_id":"MDQ6VXNlcjY2ODc4NTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6687858?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/manandey","html_url":"https:\/\/github.com\/manandey","followers_url":"https:\/\/api.github.com\/users\/manandey\/followers","following_url":"https:\/\/api.github.com\/users\/manandey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/manandey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/manandey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/manandey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/manandey\/orgs","repos_url":"https:\/\/api.github.com\/users\/manandey\/repos","events_url":"https:\/\/api.github.com\/users\/manandey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/manandey\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T17:06:23Z","updated_at":"2020-12-03T17:42:21Z","closed_at":"2020-12-03T17:42:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1065","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1065","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1065.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1065.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1065\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1065\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1064","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1064\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1064\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1064\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1064","id":756382186,"node_id":"MDU6SXNzdWU3NTYzODIxODY=","number":1064,"title":"Not support links with 302 redirect 
","user":{"login":"chameleonTK","id":6429850,"node_id":"MDQ6VXNlcjY0Mjk4NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6429850?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chameleonTK","html_url":"https:\/\/github.com\/chameleonTK","followers_url":"https:\/\/api.github.com\/users\/chameleonTK\/followers","following_url":"https:\/\/api.github.com\/users\/chameleonTK\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chameleonTK\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chameleonTK\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chameleonTK\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chameleonTK\/orgs","repos_url":"https:\/\/api.github.com\/users\/chameleonTK\/repos","events_url":"https:\/\/api.github.com\/users\/chameleonTK\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chameleonTK\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-03T17:04:43Z","updated_at":"2021-01-14T02:51:25Z","closed_at":"2021-01-14T02:51:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I have an issue adding this download link https:\/\/github.com\/jitkapat\/thailitcorpus\/releases\/download\/v.2.0\/tlc_v.2.0.tar.gz\r\n\r\nit might be because it is not a direct link (it returns 302 and redirects to aws that returns 403 for head requests). 
\r\n\r\n```\r\nr.head(\"https:\/\/github.com\/jitkapat\/thailitcorpus\/releases\/download\/v.2.0\/tlc_v.2.0.tar.gz\", allow_redirects=True) \r\n# \r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1064\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1064\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1063","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1063\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1063\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1063\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1063","id":756376374,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTMxMTMz","number":1063,"title":"Add the Ud treebank","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T16:56:41Z","updated_at":"2020-12-04T16:11:54Z","closed_at":"2020-12-04T15:51:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1063","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1063","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1063.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1063.patch","merged_at":"2020-12-04T15:51:45Z"},"body":"This PR adds the 183 datasets in 104 languages of the UD Treebank.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1063\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1063\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1062","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1062\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1062\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1062\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1062","id":756373187,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTI4NDY5","number":1062,"title":"Add KorNLU dataset","user":{"login":"sumanthd17","id":28291870,"node_id":"MDQ6VXNlcjI4MjkxODcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28291870?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sumanthd17","html_url":"https:\/\/github.com\/sumanthd17","followers_url":"https:\/\/api.github.com\/users\/sumanthd17\/followers","following_url":"https:\/\/api.github.com\/users\/sumanthd17\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sumanthd17\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sumanthd17\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sumanthd17\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sumanthd17\/orgs","repos_url":"https:\/\/api.github.com\/users\/sumanthd17\/repos","events_url":"https:\/\/api.github.com\/users\/sumanthd17\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sumanthd17\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T16:52:39Z","updated_at":"2020-12-04T11:05:19Z","closed_at":"2020-12-04T11:05:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1062","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1062","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1062.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1062.patch","merged_at":"2020-12-04T11:05:19Z"},"body":"Added Korean NLU datasets. The link to the dataset can be found [here](https:\/\/github.com\/kakaobrain\/KorNLUDatasets) and the paper can be found [here](https:\/\/arxiv.org\/abs\/2004.03289)\r\n\r\n**Note**: The MNLI tsv file is broken, so this code currently excludes the file. 
Please suggest other alternative if any @lhoestq \r\n\r\n- [x] Followed the instructions in CONTRIBUTING.md\r\n- [x] Ran the tests successfully\r\n- [x] Created the dummy data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1062\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1062\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1061","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1061\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1061\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1061\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1061","id":756362661,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTE5ODA0","number":1061,"title":"add labr dataset","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T16:38:57Z","updated_at":"2020-12-03T18:25:44Z","closed_at":"2020-12-03T18:25:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1061","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1061","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1061.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1061.patch","merged_at":"2020-12-03T18:25:44Z"},"body":"Arabic Book Reviews dataset. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1061\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1061\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1060","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1060\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1060\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1060\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1060","id":756349001,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTA4MTgx","number":1060,"title":"Fix squad V2 metric script","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-03T16:23:32Z","updated_at":"2020-12-22T15:02:20Z","closed_at":"2020-12-22T15:02:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1060","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1060","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1060.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1060.patch","merged_at":"2020-12-22T15:02:19Z"},"body":"The current squad v2 metric doesn't work with the squad (v1 or v2) datasets. 
The script is copied from `squad_evaluate` in transformers that requires the labels (with multiple answers) to be like this:\r\n```\r\nreferences = [{'id': 'a', 'answers': [\r\n {'text': 'Denver Broncos', 'answer_start': 177},\r\n {'text': 'Denver Broncos', 'answer_start': 177}\r\n]}]\r\n```\r\nwhile the dataset had references like this:\r\n```\r\nreferences = [{'id': 'a', 'answers': \r\n {'text': ['Denver Broncos' 'Denver Broncos'], 'answer_start': [177, 177]}\r\n}]\r\n```\r\n\r\nUsing one or the other format fails with the current squad v2 metric:\r\n```\r\nfrom datasets import load_metric\r\nmetric = load_metric(\"squad_v2\")\r\npredictions = [{'id': 'a', 'prediction_text': 'Denver Broncos', 'no_answer_probability': 0.0}]\r\nreferences = [{'id': 'a', 'answers': [\r\n {'text': 'Denver Broncos', 'answer_start': 177},\r\n {'text': 'Denver Broncos', 'answer_start': 177}\r\n]}]\r\nmetric.compute(predictions=predictions, references=references)\r\n```\r\nfails as well as\r\n```\r\nfrom datasets import load_metric\r\nmetric = load_metric(\"squad_v2\")\r\npredictions = [{'id': 'a', 'prediction_text': 'Denver Broncos', 'no_answer_probability': 0.0}]\r\nreferences = [{'id': 'a', 'answers': \r\n {'text': ['Denver Broncos' 'Denver Broncos'], 'answer_start': [177, 177]}\r\n}]\r\nmetric.compute(predictions=predictions, references=references)\r\n```\r\n\r\nThis is because arrow reformats the references behind the scene.\r\n\r\nWith this PR (tested locally), both the snippets up there work and return proper results.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1060\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1060\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1059","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1059\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1059\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1059\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1059","id":756348623,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxOTA3ODYy","number":1059,"title":"Add 
TLC","user":{"login":"chameleonTK","id":6429850,"node_id":"MDQ6VXNlcjY0Mjk4NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6429850?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chameleonTK","html_url":"https:\/\/github.com\/chameleonTK","followers_url":"https:\/\/api.github.com\/users\/chameleonTK\/followers","following_url":"https:\/\/api.github.com\/users\/chameleonTK\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chameleonTK\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chameleonTK\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chameleonTK\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chameleonTK\/orgs","repos_url":"https:\/\/api.github.com\/users\/chameleonTK\/repos","events_url":"https:\/\/api.github.com\/users\/chameleonTK\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chameleonTK\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-03T16:23:06Z","updated_at":"2020-12-04T11:15:33Z","closed_at":"2020-12-04T11:15:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1059","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1059","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1059.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1059.patch","merged_at":null},"body":"Added TLC dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1059\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1059\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1058","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1058\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1058\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1058\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1058","id":756332704,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxODk0Mjc0","number":1058,"title":"added paws-x 
dataset","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T16:06:01Z","updated_at":"2020-12-04T13:46:05Z","closed_at":"2020-12-04T13:46:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1058","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1058","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1058.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1058.patch","merged_at":"2020-12-04T13:46:05Z"},"body":"Added paws-x dataset. Updating README and tags in the dataset card in a while","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1058\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1058\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1057","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1057\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1057\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1057\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1057","id":756331419,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxODkzMjE4","number":1057,"title":"Adding 
TamilMixSentiment","user":{"login":"jamespaultg","id":7421838,"node_id":"MDQ6VXNlcjc0MjE4Mzg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7421838?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jamespaultg","html_url":"https:\/\/github.com\/jamespaultg","followers_url":"https:\/\/api.github.com\/users\/jamespaultg\/followers","following_url":"https:\/\/api.github.com\/users\/jamespaultg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jamespaultg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jamespaultg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jamespaultg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jamespaultg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jamespaultg\/repos","events_url":"https:\/\/api.github.com\/users\/jamespaultg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jamespaultg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T16:04:25Z","updated_at":"2020-12-04T10:09:34Z","closed_at":"2020-12-04T10:09:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1057","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1057","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1057.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1057.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1057\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1057\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1056","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1056\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1056\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1056\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1056","id":756309828,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxODc1MjA2","number":1056,"title":"Add 
deal_or_no_dialog","user":{"login":"moussaKam","id":28675016,"node_id":"MDQ6VXNlcjI4Njc1MDE2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28675016?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/moussaKam","html_url":"https:\/\/github.com\/moussaKam","followers_url":"https:\/\/api.github.com\/users\/moussaKam\/followers","following_url":"https:\/\/api.github.com\/users\/moussaKam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/moussaKam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/moussaKam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/moussaKam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/moussaKam\/orgs","repos_url":"https:\/\/api.github.com\/users\/moussaKam\/repos","events_url":"https:\/\/api.github.com\/users\/moussaKam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/moussaKam\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T15:38:07Z","updated_at":"2020-12-03T18:13:45Z","closed_at":"2020-12-03T18:13:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1056","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1056","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1056.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1056.patch","merged_at":"2020-12-03T18:13:45Z"},"body":"Add deal_or_no_dialog Dataset\r\n\r\ngithub: https:\/\/github.com\/facebookresearch\/end-to-end-negotiator\r\nPaper: [Deal or No Deal? 
End-to-End Learning for Negotiation Dialogues](https:\/\/arxiv.org\/abs\/1706.05125)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1056\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1056\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1055","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1055\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1055\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1055\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1055","id":756298372,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxODY1NjM4","number":1055,"title":"Add hebrew-sentiment","user":{"login":"elronbandel","id":23455264,"node_id":"MDQ6VXNlcjIzNDU1MjY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23455264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/elronbandel","html_url":"https:\/\/github.com\/elronbandel","followers_url":"https:\/\/api.github.com\/users\/elronbandel\/followers","following_url":"https:\/\/api.github.com\/users\/elronbandel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/elronbandel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/elronbandel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/elronbandel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/elronbandel\/orgs","repos_url":"https:\/\/api.github.com\/users\/elronbandel\/repos","events_url":"https:\/\/api.github.com\/users\/elronbandel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/elronbandel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-03T15:24:31Z","updated_at":"2020-12-04T11:24:16Z","closed_at":"2020-12-04T11:24:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1055","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1055","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1055.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1055.patch","merged_at":"2020-12-04T11:24:16Z"},"body":"hebrew-sentiment dataset is ready! 
(including tests, tags etc)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1055\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1055\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1054","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1054\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1054\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1054\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1054","id":756265688,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxODM3NzQ0","number":1054,"title":"Add dataset - SemEval 2014 - Task 1","user":{"login":"ashmeet13","id":24266995,"node_id":"MDQ6VXNlcjI0MjY2OTk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24266995?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ashmeet13","html_url":"https:\/\/github.com\/ashmeet13","followers_url":"https:\/\/api.github.com\/users\/ashmeet13\/followers","following_url":"https:\/\/api.github.com\/users\/ashmeet13\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ashmeet13\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ashmeet13\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ashmeet13\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ashmeet13\/orgs","repos_url":"https:\/\/api.github.com\/users\/ashmeet13\/repos","events_url":"https:\/\/api.github.com\/users\/ashmeet13\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ashmeet13\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T14:52:59Z","updated_at":"2020-12-04T00:52:44Z","closed_at":"2020-12-04T00:52:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1054","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1054","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1054.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1054.patch","merged_at":"2020-12-04T00:52:43Z"},"body":"Adding the dataset of SemEval 2014 Task 1\r\n\r\nFound the dataset under the shared Google Sheet > Recurring Task Datasets\r\nTask Homepage - https:\/\/alt.qcri.org\/semeval2014\/task1\r\n\r\nThank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1054\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1054\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1053","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1053\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1053\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1053\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1053","id":756176061,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNzYyNzg4","number":1053,"title":"Fix dataset URL and file names, and add column name in \"Social Bias Frames\" dataset","user":{"login":"otakumesi","id":14996977,"node_id":"MDQ6VXNlcjE0OTk2OTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14996977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/otakumesi","html_url":"https:\/\/github.com\/otakumesi","followers_url":"https:\/\/api.github.com\/users\/otakumesi\/followers","following_url":"https:\/\/api.github.com\/users\/otakumesi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/otakumesi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/otakumesi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/otakumesi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/otakumesi\/orgs","repos_url":"https:\/\/api.github.com\/users\/otakumesi\/repos","events_url":"https:\/\/api.github.com\/users\/otakumesi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/otakumesi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T13:03:05Z","updated_at":"2020-12-03T13:42:26Z","closed_at":"2020-12-03T13:42:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1053","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1053","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1053.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1053.patch","merged_at":"2020-12-03T13:42:26Z"},"body":"# Why I did\r\nWhen I use \"social_bias_frames\" datasets in this library, I got 404 Errors.\r\nSo, I fixed this error and another some problems that I faced to use the dataset.\r\n\r\n# What I did\r\n* Modify this dataset URL\r\n* Modify this dataset file names\r\n* Add a \"dataSource\" column\r\n\r\nThank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1053\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1053\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1052","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1052\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1052\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1052\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1052","id":756171798,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNzU5MjA0","number":1052,"title":"add sharc 
dataset","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T12:57:23Z","updated_at":"2020-12-03T16:44:21Z","closed_at":"2020-12-03T14:09:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1052","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1052","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1052.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1052.patch","merged_at":"2020-12-03T14:09:54Z"},"body":"This PR adds the ShARC dataset.\r\n\r\nMore info:\r\nhttps:\/\/sharc-data.github.io\/index.html","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1052\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1052\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1051","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1051\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1051\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1051\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1051","id":756169049,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNzU2OTQy","number":1051,"title":"Add Facebook 
SimpleQuestionV2","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T12:53:20Z","updated_at":"2020-12-03T17:31:59Z","closed_at":"2020-12-03T17:31:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1051","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1051","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1051.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1051.patch","merged_at":"2020-12-03T17:31:58Z"},"body":"Add simple questions v2: https:\/\/research.fb.com\/downloads\/babi\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1051\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1051\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1050","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1050\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1050\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1050\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1050","id":756166728,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNzU1MDQ3","number":1050,"title":"Add 
GoEmotions","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T12:49:53Z","updated_at":"2020-12-03T17:37:45Z","closed_at":"2020-12-03T17:30:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1050","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1050","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1050.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1050.patch","merged_at":"2020-12-03T17:30:08Z"},"body":"Adds the GoEmotions dataset, a nice emotion classification dataset with 27 (multi-)label annotations on reddit comments. Includes both a large raw version and a narrowed version with predefined train\/test\/val splits, which I've included as separate configs with the latter as a default.\r\n\r\n- Webpage\/repo: https:\/\/github.com\/google-research\/google-research\/tree\/master\/goemotions\r\n- Paper: https:\/\/arxiv.org\/abs\/2005.00547","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1050\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1050\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1049","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1049\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1049\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1049\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1049","id":756157602,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNzQ3NDY0","number":1049,"title":"Add siswati ner 
corpus","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T12:36:00Z","updated_at":"2020-12-03T17:27:02Z","closed_at":"2020-12-03T17:26:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1049","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1049","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1049.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1049.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1049\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1049\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1048","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1048\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1048\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1048\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1048","id":756133072,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNzI3MDA0","number":1048,"title":"Adding NCHLT 
dataset","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T11:59:25Z","updated_at":"2020-12-04T13:29:57Z","closed_at":"2020-12-04T13:29:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1048","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1048","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1048.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1048.patch","merged_at":"2020-12-04T13:29:56Z"},"body":"https:\/\/repo.sadilar.org\/handle\/20.500.12185\/7\/discover?filtertype_0=database&filtertype_1=title&filter_relational_operator_1=contains&filter_relational_operator_0=equals&filter_1=&filter_0=Monolingual+Text+Corpora%3A+Annotated&filtertype=project&filter_relational_operator=equals&filter=NCHLT+Text+II","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1048\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1048\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1047","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1047\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1047\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1047\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1047","id":756127490,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNzIyMjk4","number":1047,"title":"Add 
KorNLU","user":{"login":"sumanthd17","id":28291870,"node_id":"MDQ6VXNlcjI4MjkxODcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28291870?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sumanthd17","html_url":"https:\/\/github.com\/sumanthd17","followers_url":"https:\/\/api.github.com\/users\/sumanthd17\/followers","following_url":"https:\/\/api.github.com\/users\/sumanthd17\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sumanthd17\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sumanthd17\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sumanthd17\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sumanthd17\/orgs","repos_url":"https:\/\/api.github.com\/users\/sumanthd17\/repos","events_url":"https:\/\/api.github.com\/users\/sumanthd17\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sumanthd17\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-03T11:50:54Z","updated_at":"2020-12-03T17:17:07Z","closed_at":"2020-12-03T17:16:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1047","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1047","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1047.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1047.patch","merged_at":null},"body":"Added Korean NLU datasets. The link to the dataset can be found [here](https:\/\/github.com\/kakaobrain\/KorNLUDatasets) and the paper can be found [here](https:\/\/arxiv.org\/abs\/2004.03289)\r\n\r\n**Note**: The MNLI tsv file is broken, so this code currently excludes the file. 
Please suggest other alternative if any @lhoestq \r\n\r\n- [x] Followed the instructions in CONTRIBUTING.md\r\n- [x] Ran the tests successfully\r\n- [x] Created the dummy data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1047\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1047\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1046","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1046\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1046\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1046\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1046","id":756122709,"node_id":"MDU6SXNzdWU3NTYxMjI3MDk=","number":1046,"title":"Dataset.map() turns tensors into lists?","user":{"login":"tombosc","id":5270804,"node_id":"MDQ6VXNlcjUyNzA4MDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5270804?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tombosc","html_url":"https:\/\/github.com\/tombosc","followers_url":"https:\/\/api.github.com\/users\/tombosc\/followers","following_url":"https:\/\/api.github.com\/users\/tombosc\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tombosc\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tombosc\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tombosc\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tombosc\/orgs","repos_url":"https:\/\/api.github.com\/users\/tombosc\/repos","events_url":"https:\/\/api.github.com\/users\/tombosc\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tombosc\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-03T11:43:46Z","updated_at":"2020-12-23T13:51:12Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I apply `Dataset.map()` to a function that returns a dict of torch tensors (like a tokenizer from the repo transformers). 
However, in the mapped dataset, these tensors have turned to lists!\r\n\r\n```import datasets\r\nimport torch \r\nfrom datasets import load_dataset \r\nprint(\"version datasets\", datasets.__version__)\r\n\r\ndataset = load_dataset(\"snli\", split='train[0:50]') \r\n\r\ndef tokenizer_fn(example):\r\n # actually uses a tokenizer which does something like:\r\n return {'input_ids': torch.tensor([[0, 1, 2]])}\r\n\r\nprint(\"First item in dataset:\\n\", dataset[0])\r\ntokenized = tokenizer_fn(dataset[0])\r\nprint(\"Tokenized hyp:\\n\", tokenized)\r\ndataset_tok = dataset.map(tokenizer_fn, batched=False,\r\n remove_columns=['label', 'premise', 'hypothesis'])\r\nprint(\"Tokenized using map:\\n\", dataset_tok[0])\r\nprint(type(tokenized['input_ids']), type(dataset_tok[0]['input_ids']))\r\ndataset_tok = dataset.map(tokenizer_fn, batched=False,\r\n remove_columns=['label', 'premise', 'hypothesis'])\r\nprint(\"Tokenized using map:\\n\", dataset_tok[0])\r\nprint(type(tokenized['input_ids']), type(dataset_tok[0]['input_ids']))\r\n```\r\n\r\nThe output is:\r\n\r\n```\r\nversion datasets 1.1.3\r\nReusing dataset snli (\/home\/tom\/.cache\/huggingface\/datasets\/snli\/plain_text\/1.0.0\/bb1102591c6230bd78813e229d5dd4c7fbf4fc478cec28f298761eb69e5b537c)\r\nFirst item in dataset:\r\n {'premise': 'A person on a horse jumps over a broken down airplane.', 'hypothesis': 'A person is training his horse for a competition.', 'label': 1}\r\nTokenized hyp:\r\n {'input_ids': tensor([[0, 1, 2]])}\r\nLoading cached processed dataset at \/home\/tom\/.cache\/huggingface\/datasets\/snli\/plain_text\/1.0.0\/bb1102591c6230bd78813e229d5dd4c7fbf4fc478cec28f298761eb69e5b537c\/cache-fe38f449fe9ac46f.arrow\r\nTokenized using map:\r\n {'input_ids': [[0, 1, 2]]}\r\n \r\n```\r\n\r\nOr am I doing something wrong?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1046\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1046\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1045","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1045\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1045\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1045\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1045","id":756120760,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNzE2NzIy","number":1045,"title":"Add xitsonga ner 
corpus","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T11:40:48Z","updated_at":"2020-12-03T17:20:03Z","closed_at":"2020-12-03T17:19:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1045","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1045","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1045.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1045.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1045\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1045\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1044","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1044\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1044\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1044\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1044","id":756111647,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNzA5MTg0","number":1044,"title":"Add AMTTL Chinese Word Segmentation 
Dataset","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T11:27:52Z","updated_at":"2020-12-03T17:13:14Z","closed_at":"2020-12-03T17:13:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1044","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1044","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1044.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1044.patch","merged_at":"2020-12-03T17:13:13Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1044\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1044\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1043","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1043\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1043\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1043\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1043","id":756100717,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNzAwMDQ1","number":1043,"title":"Add TSAC: Tunisian Sentiment Analysis 
Corpus","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T11:12:35Z","updated_at":"2020-12-03T13:35:05Z","closed_at":"2020-12-03T13:32:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1043","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1043","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1043.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1043.patch","merged_at":"2020-12-03T13:32:24Z"},"body":"github: https:\/\/github.com\/fbougares\/TSAC\r\n\r\npaper: https:\/\/www.aclweb.org\/anthology\/W17-1307\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1043\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1043\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1042","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1042\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1042\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1042\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1042","id":756097583,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNjk3NDU4","number":1042,"title":"Add Big Patent 
dataset","user":{"login":"mattbui","id":46804938,"node_id":"MDQ6VXNlcjQ2ODA0OTM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46804938?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mattbui","html_url":"https:\/\/github.com\/mattbui","followers_url":"https:\/\/api.github.com\/users\/mattbui\/followers","following_url":"https:\/\/api.github.com\/users\/mattbui\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mattbui\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mattbui\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mattbui\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mattbui\/orgs","repos_url":"https:\/\/api.github.com\/users\/mattbui\/repos","events_url":"https:\/\/api.github.com\/users\/mattbui\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mattbui\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-03T11:07:59Z","updated_at":"2020-12-04T04:38:26Z","closed_at":"2020-12-04T04:38:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1042","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1042","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1042.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1042.patch","merged_at":null},"body":"- More info on the dataset: https:\/\/evasharma.github.io\/bigpatent\/\r\n- There's another raw version of the dataset available from tfds. However, they're quite large so I don't have the resources to fully test all the configs for that version yet. We'll try to add it later.\r\n- ~Currently, there are no dummy data for this dataset yet as I'm facing some problems with generating them. 
I'm trying to add them later.~","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1042\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1042\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1041","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1041\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1041\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1041\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1041","id":756055102,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNjYyMDI0","number":1041,"title":"Add SuperGLUE metric","user":{"login":"calpt","id":36051308,"node_id":"MDQ6VXNlcjM2MDUxMzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36051308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/calpt","html_url":"https:\/\/github.com\/calpt","followers_url":"https:\/\/api.github.com\/users\/calpt\/followers","following_url":"https:\/\/api.github.com\/users\/calpt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/calpt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/calpt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/calpt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/calpt\/orgs","repos_url":"https:\/\/api.github.com\/users\/calpt\/repos","events_url":"https:\/\/api.github.com\/users\/calpt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/calpt\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T10:11:34Z","updated_at":"2021-02-23T19:02:59Z","closed_at":"2021-02-23T18:02:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1041","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1041","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1041.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1041.patch","merged_at":"2021-02-23T18:02:12Z"},"body":"Adds a new metric for the SuperGLUE benchmark (similar to the GLUE benchmark metric).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1041\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1041\/timeline","performed_via_github_app":null} 
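The SuperGLUE metric entry above (#1041) only says it works "similar to the GLUE benchmark metric". A minimal usage sketch, assuming it follows the same `load_metric("super_glue", <task config>)` interface; the "boolq" config name and the toy label ids below are illustrative placeholders, not taken from the PR:

```python
# Minimal sketch of using the SuperGLUE metric, assuming it mirrors the GLUE
# metric interface: load_metric("super_glue", <task config>). The "boolq"
# config and the label ids below are illustrative, not real model outputs.
from datasets import load_metric

metric = load_metric("super_glue", "boolq")
predictions = [0, 1, 1, 0]   # toy predicted label ids
references = [0, 1, 0, 0]    # toy gold label ids
print(metric.compute(predictions=predictions, references=references))
```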
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1040","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1040\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1040\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1040\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1040","id":756050387,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNjU4MTU3","number":1040,"title":"Add UN Universal Declaration of Human Rights (UDHR)","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T10:04:58Z","updated_at":"2020-12-03T19:20:15Z","closed_at":"2020-12-03T19:20:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1040","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1040","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1040.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1040.patch","merged_at":"2020-12-03T19:20:11Z"},"body":"Universal declaration of human rights with translations in 464 languages and dialects.\r\n\r\n- UN page: https:\/\/www.ohchr.org\/EN\/UDHR\/Pages\/UDHRIndex.aspx\r\n- Raw data source: https:\/\/unicode.org\/udhr\/index.html\r\n\r\nEach instance of the dataset corresponds to one translation of the document. Since there's only one instance per language (and because there are 500 languages so the dummy data would be messy), I opted to just include them all under the same single config. 
I wasn't able to find any kind of license so I just copied the copyright notice.\r\n\r\nI was pretty careful generating the language tags so they _should_ all be correct & consistent BCP-47 codes per the docs.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1040\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1040\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1039","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1039\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1039\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1039\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1039","id":756000478,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNjE3MDI2","number":1039,"title":"Update ADD NEW DATASET","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T08:58:32Z","updated_at":"2020-12-03T09:18:28Z","closed_at":"2020-12-03T09:18:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1039","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1039","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1039.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1039.patch","merged_at":"2020-12-03T09:18:09Z"},"body":"This PR adds a couple of details on cloning\/rebasing the repo.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1039\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1039\/timeline","performed_via_github_app":null} 
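The UDHR entry (#1040) above describes a single-config dataset with one row per translation, each tagged with a BCP-47 language code. A small loading sketch, assuming the dataset is published under the id "udhr" with a train split; the field names are not asserted here, so the snippet just prints them for inspection:

```python
# Sketch only: load the UDHR dataset described in #1040, assuming the id "udhr"
# and a single default config with one row per translation of the document.
from datasets import load_dataset

udhr = load_dataset("udhr", split="train")
print(udhr)            # row count should match the number of translations
print(udhr[0].keys())  # inspect the actual field names (language tag, text, ...)
print(udhr[0])
```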
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1038","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1038\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1038\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1038\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1038","id":755987997,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNjA2Njgw","number":1038,"title":"add med_hop","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T08:40:27Z","updated_at":"2020-12-03T16:53:13Z","closed_at":"2020-12-03T16:52:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1038","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1038","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1038.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1038.patch","merged_at":"2020-12-03T16:52:23Z"},"body":"This PR adds the MedHop dataset from the QAngaroo multi hop reading comprehension datasets\r\n\r\nMore info:\r\nhttp:\/\/qangaroo.cs.ucl.ac.uk\/index.html","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1038\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1038\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1037","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1037\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1037\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1037\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1037","id":755975586,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNTk2NDkx","number":1037,"title":"Fix docs indentation 
issues","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-03T08:21:34Z","updated_at":"2020-12-22T16:01:15Z","closed_at":"2020-12-22T16:01:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1037","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1037","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1037.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1037.patch","merged_at":"2020-12-22T16:01:14Z"},"body":"Replace tabs with spaces.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1037\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1037\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1036","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1036\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1036\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1036\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1036","id":755953294,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNTc4MjQ4","number":1036,"title":"Add 
PerSenT","user":{"login":"jeromeku","id":2455711,"node_id":"MDQ6VXNlcjI0NTU3MTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2455711?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jeromeku","html_url":"https:\/\/github.com\/jeromeku","followers_url":"https:\/\/api.github.com\/users\/jeromeku\/followers","following_url":"https:\/\/api.github.com\/users\/jeromeku\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jeromeku\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jeromeku\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jeromeku\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jeromeku\/orgs","repos_url":"https:\/\/api.github.com\/users\/jeromeku\/repos","events_url":"https:\/\/api.github.com\/users\/jeromeku\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jeromeku\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-03T07:43:58Z","updated_at":"2020-12-14T13:40:43Z","closed_at":"2020-12-14T13:40:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1036","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1036","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1036.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1036.patch","merged_at":null},"body":"Added [Person's SentimenT](https:\/\/stonybrooknlp.github.io\/PerSenT\/) dataset. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1036\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1036\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1035","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1035\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1035\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1035\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1035","id":755947097,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNTczMTc3","number":1035,"title":"add 
wiki_hop","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T07:32:26Z","updated_at":"2020-12-03T16:43:40Z","closed_at":"2020-12-03T16:41:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1035","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1035","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1035.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1035.patch","merged_at":"2020-12-03T16:41:12Z"},"body":"This PR adds the WikiHop dataset from the QAngaroo multi hop reading comprehension datasets\r\n\r\nMore info:\r\nhttp:\/\/qangaroo.cs.ucl.ac.uk\/index.html\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1035\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1035\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1034","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1034\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1034\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1034\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1034","id":755936327,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNTY0MjA0","number":1034,"title":"add 
scb_mt_enth_2020","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T07:13:49Z","updated_at":"2020-12-03T16:57:23Z","closed_at":"2020-12-03T16:57:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1034","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1034","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1034.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1034.patch","merged_at":"2020-12-03T16:57:23Z"},"body":"## scb-mt-en-th-2020: A Large English-Thai Parallel Corpus\r\n\r\nThe primary objective of our work is to build a large-scale English-Thai dataset for machine translation.\r\nWe construct an English-Thai machine translation dataset with over 1 million segment pairs, curated from various sources,\r\nnamely news, Wikipedia articles, SMS messages, task-based dialogs, web-crawled data and government documents.\r\nMethodology for gathering data, building parallel texts and removing noisy sentence pairs are presented in a reproducible manner.\r\nWe train machine translation models based on this dataset. 
Our models' performance are comparable to that of\r\nGoogle Translation API (as of May 2020) for Thai-English and outperform Google when the Open Parallel Corpus (OPUS) is\r\nincluded in the training data for both Thai-English and English-Thai translation.\r\nThe dataset, pre-trained models, and source code to reproduce our work are available for public use.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1034\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1034\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1033","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1033\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1033\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1033\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1033","id":755921927,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNTUxNzYw","number":1033,"title":"Add support for \".txm\" format","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-03T06:52:08Z","updated_at":"2021-02-21T19:47:11Z","closed_at":"2021-02-21T19:47:11Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1033","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1033","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1033.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1033.patch","merged_at":"2021-02-21T19:47:11Z"},"body":"In dummy data generation, add support for XML-like \".txm\" file format.\r\n\r\nAlso support filenames with additional compression extension: \".txm.gz\".","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1033\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1033\/timeline","performed_via_github_app":null} 
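For the scb-mt-en-th-2020 corpus described in #1034 above, a loading sketch under the assumption that the dataset id follows the PR title (`scb_mt_enth_2020`) and that it exposes a language-pair config; the "enth" config name is an illustrative guess and should be checked against the published dataset card:

```python
# Sketch: load the English-Thai parallel corpus from #1034. The dataset id and
# the "enth" config name are assumptions based on the PR title, not verified.
from datasets import load_dataset

pairs = load_dataset("scb_mt_enth_2020", "enth", split="train")
print(pairs)     # dataset info: number of segment pairs and feature names
print(pairs[0])  # inspect one English-Thai segment pair
```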
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1032","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1032\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1032\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1032\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1032","id":755858785,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNDk2MTU2","number":1032,"title":"IIT B English to Hindi machine translation dataset","user":{"login":"spatil6","id":6419011,"node_id":"MDQ6VXNlcjY0MTkwMTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6419011?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/spatil6","html_url":"https:\/\/github.com\/spatil6","followers_url":"https:\/\/api.github.com\/users\/spatil6\/followers","following_url":"https:\/\/api.github.com\/users\/spatil6\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/spatil6\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/spatil6\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/spatil6\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/spatil6\/orgs","repos_url":"https:\/\/api.github.com\/users\/spatil6\/repos","events_url":"https:\/\/api.github.com\/users\/spatil6\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/spatil6\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-03T05:18:45Z","updated_at":"2021-01-10T08:44:51Z","closed_at":"2021-01-10T08:44:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1032","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1032","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1032.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1032.patch","merged_at":null},"body":"Adding IIT Bombay English-Hindi Corpus dataset\r\nmore info : http:\/\/www.cfilt.iitb.ac.in\/iitb_parallel\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1032\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1032\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1031","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1031\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1031\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1031\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1031","id":755844004,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNDgyMzEy","number":1031,"title":"add 
crows_pairs","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-03T05:05:11Z","updated_at":"2020-12-03T18:29:52Z","closed_at":"2020-12-03T18:29:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1031","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1031","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1031.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1031.patch","merged_at":"2020-12-03T18:29:39Z"},"body":"This PR adds CrowS-Pairs datasets.\r\n\r\nMore info:\r\nhttps:\/\/github.com\/nyu-mll\/crows-pairs\/\r\nhttps:\/\/arxiv.org\/pdf\/2010.00133.pdf","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1031\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1031\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1030","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1030\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1030\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1030\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1030","id":755777438,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNDI0MDM3","number":1030,"title":"allegro_reviews dataset 
","user":{"login":"abecadel","id":1654113,"node_id":"MDQ6VXNlcjE2NTQxMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1654113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abecadel","html_url":"https:\/\/github.com\/abecadel","followers_url":"https:\/\/api.github.com\/users\/abecadel\/followers","following_url":"https:\/\/api.github.com\/users\/abecadel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abecadel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abecadel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abecadel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abecadel\/orgs","repos_url":"https:\/\/api.github.com\/users\/abecadel\/repos","events_url":"https:\/\/api.github.com\/users\/abecadel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abecadel\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-03T03:11:39Z","updated_at":"2020-12-04T10:56:29Z","closed_at":"2020-12-03T16:34:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1030","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1030","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1030.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1030.patch","merged_at":"2020-12-03T16:34:46Z"},"body":"- **Name:** *allegro_reviews*\r\n- **Description:** *Allegro Reviews is a sentiment analysis dataset, consisting of 11,588 product reviews written in Polish and extracted from Allegro.pl - a popular e-commerce marketplace. 
Each review contains at least 50 words and has a rating on a scale from one (negative review) to five (positive review).*\r\n- **Data:** *https:\/\/github.com\/allegro\/klejbenchmark-allegroreviews*\r\n- **Motivation:** *The KLEJ benchmark (Kompleksowa Lista Ewaluacji J\u0119zykowych) is a set of nine evaluation tasks for the Polish language understanding.*","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1030\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1030\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1029","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1029\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1029\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1029\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1029","id":755767616,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxNDE2NzE4","number":1029,"title":"Add PEC","user":{"login":"zhongpeixiang","id":11826803,"node_id":"MDQ6VXNlcjExODI2ODAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11826803?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zhongpeixiang","html_url":"https:\/\/github.com\/zhongpeixiang","followers_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/followers","following_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/orgs","repos_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/repos","events_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-03T02:46:08Z","updated_at":"2020-12-04T10:58:19Z","closed_at":"2020-12-03T16:15:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1029","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1029","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1029.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1029.patch","merged_at":"2020-12-03T16:15:06Z"},"body":"A persona-based empathetic conversation dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1029\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1029\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1028","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1028\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1028\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1028\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1028","id":755712854,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMzc0MTYw","number":1028,"title":"Add ASSET dataset for text simplification evaluation","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-03T00:28:29Z","updated_at":"2020-12-17T10:03:06Z","closed_at":"2020-12-03T16:34:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1028","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1028","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1028.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1028.patch","merged_at":"2020-12-03T16:34:37Z"},"body":"Adding the ASSET dataset from https:\/\/github.com\/facebookresearch\/asset\r\n\r\nOne config for the simplification data, one for the human ratings of quality.\r\n\r\nThe README.md borrows from that written by @juand-r","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1028\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1028\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1027","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1027\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1027\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1027\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1027","id":755695420,"node_id":"MDU6SXNzdWU3NTU2OTU0MjA=","number":1027,"title":"Hi","user":{"login":"suemori87","id":75398394,"node_id":"MDQ6VXNlcjc1Mzk4Mzk0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75398394?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/suemori87","html_url":"https:\/\/github.com\/suemori87","followers_url":"https:\/\/api.github.com\/users\/suemori87\/followers","following_url":"https:\/\/api.github.com\/users\/suemori87\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/suemori87\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/suemori87\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/suemori87\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/suemori87\/orgs","repos_url":"https:\/\/api.github.com\/users\/suemori87\/repos","events_url":"https:\/\/api.github.com\/users\/suemori87\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/suemori87\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T23:47:14Z","updated_at":"2020-12-03T16:42:41Z","closed_at":"2020-12-03T16:42:41Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1027\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1027\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1026","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1026\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1026\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1026\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1026","id":755689195,"node_id":"MDU6SXNzdWU3NTU2ODkxOTU=","number":1026,"title":"L\u00edo 
o","user":{"login":"Isaias0","id":73465581,"node_id":"MDQ6VXNlcjczNDY1NTgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73465581?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Isaias0","html_url":"https:\/\/github.com\/Isaias0","followers_url":"https:\/\/api.github.com\/users\/Isaias0\/followers","following_url":"https:\/\/api.github.com\/users\/Isaias0\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Isaias0\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Isaias0\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Isaias0\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Isaias0\/orgs","repos_url":"https:\/\/api.github.com\/users\/Isaias0\/repos","events_url":"https:\/\/api.github.com\/users\/Isaias0\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Isaias0\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T23:32:25Z","updated_at":"2020-12-03T16:42:47Z","closed_at":"2020-12-03T16:42:47Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"````l`````````\n\n```\nO\n```\n`````\n\u00d1o\n```\n````\n\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1026\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1026\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1025","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1025\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1025\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1025\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1025","id":755673371,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMzQxNjE5","number":1025,"title":"Add Sesotho 
Ner","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-02T23:00:15Z","updated_at":"2020-12-16T16:27:03Z","closed_at":"2020-12-16T16:27:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1025","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1025","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1025.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1025.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1025\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1025\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1024","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1024\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1024\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1024\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1024","id":755664113,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMzMzOTc5","number":1024,"title":"Add ZEST: ZEroShot learning from Task 
descriptions","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T22:41:20Z","updated_at":"2020-12-03T19:21:00Z","closed_at":"2020-12-03T16:09:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1024","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1024","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1024.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1024.patch","merged_at":"2020-12-03T16:09:14Z"},"body":"Adds the ZEST dataset on zero-shot learning from task descriptions from AI2.\r\n\r\n- Webpage: https:\/\/allenai.org\/data\/zest\r\n- Paper: https:\/\/arxiv.org\/abs\/2011.08115\r\n\r\nThe nature of this dataset made the supported task tags tricky if you wouldn't mind giving any feedback @yjernite. 
Also let me know if you think we should have a `other-task-generalization` or something like that...","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1024\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1024\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1023","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1023\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1023\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1023\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1023","id":755655752,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMzI3MTMy","number":1023,"title":"Add Schema Guided Dialogue dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T22:26:01Z","updated_at":"2020-12-03T01:18:01Z","closed_at":"2020-12-03T01:18:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1023","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1023","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1023.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1023.patch","merged_at":"2020-12-03T01:18:01Z"},"body":"This PR adds the Schema Guided Dialogue dataset created for the DSTC8 challenge\r\n- https:\/\/github.com\/google-research-datasets\/dstc8-schema-guided-dialogue\r\n\r\nA bit simpler than MultiWOZ, the only tricky thing is the sequence of dictionaries that had to be linearized. 
There is a config for the data proper, and a config for the schemas.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1023\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1023\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1022","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1022\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1022\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1022\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1022","id":755651377,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMzIzNTkw","number":1022,"title":"add MRQA","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T22:17:56Z","updated_at":"2020-12-04T00:34:26Z","closed_at":"2020-12-04T00:34:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1022","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1022","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1022.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1022.patch","merged_at":"2020-12-04T00:34:24Z"},"body":"MRQA (shared task 2019)\r\nout of distribution generalization\r\nFramed as extractive question answering\r\nDataset is the concatenation (of subsets) of existing QA datasets processed to match the SQuAD format","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1022\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1022\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1021","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1021\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1021\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1021\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1021","id":755644559,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMzE4MTQw","number":1021,"title":"Add Gutenberg time references dataset","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T22:05:26Z","updated_at":"2020-12-03T10:33:39Z","closed_at":"2020-12-03T10:33:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1021","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1021","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1021.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1021.patch","merged_at":"2020-12-03T10:33:38Z"},"body":"This PR adds the gutenberg_time dataset: https:\/\/arxiv.org\/abs\/2011.04124","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1021\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1021\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1020","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1020\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1020\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1020\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1020","id":755601450,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMjgyODQy","number":1020,"title":"Add Setswana 
NER","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T20:52:07Z","updated_at":"2020-12-03T14:56:14Z","closed_at":"2020-12-03T14:56:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1020","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1020","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1020.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1020.patch","merged_at":"2020-12-03T14:56:14Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1020\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1020\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1019","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1019\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1019\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1019\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1019","id":755582090,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMjY2NzAz","number":1019,"title":"Add caWaC 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T20:18:55Z","updated_at":"2020-12-03T14:47:09Z","closed_at":"2020-12-03T14:47:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1019","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1019","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1019.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1019.patch","merged_at":"2020-12-03T14:47:09Z"},"body":"Add dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1019\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1019\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1018","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1018\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1018\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1018\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1018","id":755570882,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMjU3NTU2","number":1018,"title":"Add Sepedi 
NER","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T20:01:05Z","updated_at":"2020-12-03T21:47:03Z","closed_at":"2020-12-03T21:46:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1018","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1018","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1018.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1018.patch","merged_at":null},"body":"This is a new branch created for this dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1018\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1018\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1017","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1017\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1017\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1017\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1017","id":755558175,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMjQ3MDE2","number":1017,"title":"Specify file 
encoding","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T19:40:45Z","updated_at":"2020-12-03T00:44:25Z","closed_at":"2020-12-03T00:44:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1017","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1017","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1017.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1017.patch","merged_at":"2020-12-03T00:44:25Z"},"body":"If not specified, Python uses system default, which for Windows is not \"utf-8\".","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1017\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1017\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1016","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1016\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1016\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1016\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1016","id":755521862,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMjE3MjM3","number":1016,"title":"Add CLINC150 
dataset","user":{"login":"sumanthd17","id":28291870,"node_id":"MDQ6VXNlcjI4MjkxODcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28291870?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sumanthd17","html_url":"https:\/\/github.com\/sumanthd17","followers_url":"https:\/\/api.github.com\/users\/sumanthd17\/followers","following_url":"https:\/\/api.github.com\/users\/sumanthd17\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sumanthd17\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sumanthd17\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sumanthd17\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sumanthd17\/orgs","repos_url":"https:\/\/api.github.com\/users\/sumanthd17\/repos","events_url":"https:\/\/api.github.com\/users\/sumanthd17\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sumanthd17\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T18:44:30Z","updated_at":"2020-12-03T10:32:04Z","closed_at":"2020-12-03T10:32:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1016","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1016","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1016.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1016.patch","merged_at":"2020-12-03T10:32:04Z"},"body":"Added CLINC150 Dataset. The link to the dataset can be found [here](https:\/\/github.com\/clinc\/oos-eval) and the paper can be found [here](https:\/\/www.aclweb.org\/anthology\/D19-1131.pdf)\r\n\r\n- [x] Followed the instructions in CONTRIBUTING.md\r\n- [x] Ran the tests successfully\r\n- [x] Created the dummy data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1016\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1016\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1015","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1015\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1015\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1015\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1015","id":755508841,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMjA2MTgy","number":1015,"title":"add hard 
dataset","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T18:27:36Z","updated_at":"2020-12-03T15:03:54Z","closed_at":"2020-12-03T15:03:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1015","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1015","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1015.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1015.patch","merged_at":"2020-12-03T15:03:54Z"},"body":"Hotel Reviews in Arabic language.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1015\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1015\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1014","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1014\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1014\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1014\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1014","id":755505851,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMjAzNzAz","number":1014,"title":"Add SciTLDR Dataset (Take 
2)","user":{"login":"Bharat123rox","id":13381361,"node_id":"MDQ6VXNlcjEzMzgxMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13381361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Bharat123rox","html_url":"https:\/\/github.com\/Bharat123rox","followers_url":"https:\/\/api.github.com\/users\/Bharat123rox\/followers","following_url":"https:\/\/api.github.com\/users\/Bharat123rox\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Bharat123rox\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Bharat123rox\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Bharat123rox\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Bharat123rox\/orgs","repos_url":"https:\/\/api.github.com\/users\/Bharat123rox\/repos","events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-12-02T18:22:50Z","updated_at":"2020-12-02T18:55:10Z","closed_at":"2020-12-02T18:37:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1014","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1014","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1014.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1014.patch","merged_at":"2020-12-02T18:37:58Z"},"body":"Adds the SciTLDR Dataset by AI2\r\nAdded the `README.md` card with tags to the best of my knowledge\r\n\r\nMulti-target summaries or TLDRs of Scientific Documents\r\n\r\nContinued from #986 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1014\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1014\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1013","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1013\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1013\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1013\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1013","id":755493075,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMTkzMTcy","number":1013,"title":"Adding CS restaurants 
dataset","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T18:02:30Z","updated_at":"2020-12-02T18:25:20Z","closed_at":"2020-12-02T18:25:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1013","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1013","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1013.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1013.patch","merged_at":"2020-12-02T18:25:19Z"},"body":"This PR adds the CS restaurants dataset; this is a re-opening of a previous PR with a chaotic commit history.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1013\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1013\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1012","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1012\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1012\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1012\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1012","id":755485658,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMTg3MTI2","number":1012,"title":"Adding Evidence Inference 
Data:","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T17:51:35Z","updated_at":"2020-12-03T15:04:46Z","closed_at":"2020-12-03T15:04:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1012","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1012","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1012.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1012.patch","merged_at":"2020-12-03T15:04:46Z"},"body":"http:\/\/evidence-inference.ebm-nlp.com\/download\/\nhttps:\/\/arxiv.org\/pdf\/2005.04177.pdf","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1012\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1012\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1011","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1011\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1011\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1011\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1011","id":755463726,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMTY5MjA3","number":1011,"title":"Add Bilingual Corpus of Arabic-English Parallel 
Tweets","user":{"login":"sumanthd17","id":28291870,"node_id":"MDQ6VXNlcjI4MjkxODcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28291870?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sumanthd17","html_url":"https:\/\/github.com\/sumanthd17","followers_url":"https:\/\/api.github.com\/users\/sumanthd17\/followers","following_url":"https:\/\/api.github.com\/users\/sumanthd17\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sumanthd17\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sumanthd17\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sumanthd17\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sumanthd17\/orgs","repos_url":"https:\/\/api.github.com\/users\/sumanthd17\/repos","events_url":"https:\/\/api.github.com\/users\/sumanthd17\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sumanthd17\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-12-02T17:20:02Z","updated_at":"2020-12-04T14:45:10Z","closed_at":"2020-12-04T14:44:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1011","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1011","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1011.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1011.patch","merged_at":"2020-12-04T14:44:33Z"},"body":"Added Bilingual Corpus of Arabic-English Parallel Tweets. The link to the dataset can be found [here](https:\/\/alt.qcri.org\/wp-content\/uploads\/2020\/08\/Bilingual-Corpus-of-Arabic-English-Parallel-Tweets.zip) and the paper can be found [here](https:\/\/www.aclweb.org\/anthology\/2020.bucc-1.3.pdf)\r\n\r\n- [x] Followed the instructions in CONTRIBUTING.md\r\n- [x] Ran the tests successfully\r\n- [x] Created the dummy data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1011\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1011\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1010","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1010\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1010\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1010\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1010","id":755432143,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMTQzMzAx","number":1010,"title":"Add NoReC: Norwegian Review 
Corpus","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T16:38:29Z","updated_at":"2021-02-18T14:47:29Z","closed_at":"2021-02-18T14:47:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1010","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1010","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1010.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1010.patch","merged_at":"2021-02-18T14:47:28Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1010\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1010\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1009","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1009\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1009\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1009\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1009","id":755384433,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMTA0NDc5","number":1009,"title":"Adding C3 dataset: the first free-form multiple-Choice Chinese machine reading Comprehension 
dataset.","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T15:40:36Z","updated_at":"2020-12-03T13:16:30Z","closed_at":"2020-12-03T13:16:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1009","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1009","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1009.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1009.patch","merged_at":"2020-12-03T13:16:29Z"},"body":"https:\/\/github.com\/nlpdata\/c3\nhttps:\/\/arxiv.org\/abs\/1904.09679","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1009\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1009\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1008","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1008\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1008\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1008\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1008","id":755372798,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMDk1ODQy","number":1008,"title":"Adding C3 dataset: the first free-form multiple-Choice Chinese machine reading Comprehension dataset. 
https:\/\/github.com\/nlpdata\/c3 https:\/\/arxiv.org\/abs\/1904.09679","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T15:28:05Z","updated_at":"2020-12-02T15:40:55Z","closed_at":"2020-12-02T15:40:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1008","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1008","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1008.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1008.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1008\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1008\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1007","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1007\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1007\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1007\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1007","id":755364078,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMDg4NTk5","number":1007,"title":"Include license file in source 
distribution","user":{"login":"synapticarbors","id":589279,"node_id":"MDQ6VXNlcjU4OTI3OQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/589279?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/synapticarbors","html_url":"https:\/\/github.com\/synapticarbors","followers_url":"https:\/\/api.github.com\/users\/synapticarbors\/followers","following_url":"https:\/\/api.github.com\/users\/synapticarbors\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/synapticarbors\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/synapticarbors\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/synapticarbors\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/synapticarbors\/orgs","repos_url":"https:\/\/api.github.com\/users\/synapticarbors\/repos","events_url":"https:\/\/api.github.com\/users\/synapticarbors\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/synapticarbors\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T15:17:43Z","updated_at":"2020-12-02T17:58:05Z","closed_at":"2020-12-02T17:58:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1007","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1007","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1007.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1007.patch","merged_at":"2020-12-02T17:58:05Z"},"body":"It would be helpful to include the license file in the source distribution.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1007\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1007\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1006","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1006\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1006\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1006\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1006","id":755362766,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMDg3NTIy","number":1006,"title":"add 
yahoo_answers_topics","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T15:16:13Z","updated_at":"2020-12-03T16:44:38Z","closed_at":"2020-12-02T18:01:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1006","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1006","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1006.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1006.patch","merged_at":"2020-12-02T18:01:32Z"},"body":"This PR adds yahoo answers topic classification dataset.\r\n\r\nMore info:\r\nhttps:\/\/github.com\/LC-John\/Yahoo-Answers-Topic-Classification-Dataset\r\n\r\ncc @joeddav, @yjernite ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1006\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1006\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1005","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1005\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1005\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1005\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1005","id":755337255,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMDY3Mjc5","number":1005,"title":"Adding Autshumato South african 
langages:","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T14:47:33Z","updated_at":"2020-12-03T13:13:30Z","closed_at":"2020-12-03T13:13:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1005","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1005","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1005.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1005.patch","merged_at":"2020-12-03T13:13:30Z"},"body":"https:\/\/repo.sadilar.org\/handle\/20.500.12185\/7\/discover?filtertype=database&filter_relational_operator=equals&filter=Multilingual+Text+Corpora%3A+Aligned","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1005\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1005\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1004","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1004\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1004\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1004\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1004","id":755325368,"node_id":"MDU6SXNzdWU3NTUzMjUzNjg=","number":1004,"title":"how large datasets are handled under the hood 
","user":{"login":"rabeehkarimimahabadi","id":73364383,"node_id":"MDQ6VXNlcjczMzY0Mzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73364383?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi","html_url":"https:\/\/github.com\/rabeehkarimimahabadi","followers_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-02T14:32:40Z","updated_at":"2021-02-01T10:23:51Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI want to use multiple large datasets with a mapping style dataloader, where they cannot fit into memory, could you tell me how you handled the datasets under the hood? is this you bring all in memory in case of mapping style ones? or is this some sharding under the hood and you bring in memory when necessary, thanks ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1004\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1004\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1003","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1003\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1003\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1003\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1003","id":755310318,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMDQ1NDcy","number":1003,"title":"Add 
multi_x_science_sum","user":{"login":"moussaKam","id":28675016,"node_id":"MDQ6VXNlcjI4Njc1MDE2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28675016?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/moussaKam","html_url":"https:\/\/github.com\/moussaKam","followers_url":"https:\/\/api.github.com\/users\/moussaKam\/followers","following_url":"https:\/\/api.github.com\/users\/moussaKam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/moussaKam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/moussaKam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/moussaKam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/moussaKam\/orgs","repos_url":"https:\/\/api.github.com\/users\/moussaKam\/repos","events_url":"https:\/\/api.github.com\/users\/moussaKam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/moussaKam\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T14:14:01Z","updated_at":"2020-12-02T17:39:05Z","closed_at":"2020-12-02T17:39:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1003","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1003","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1003.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1003.patch","merged_at":"2020-12-02T17:39:05Z"},"body":"Add Multi-XScience Dataset. \r\n\r\ngithub repo: https:\/\/github.com\/yaolu\/Multi-XScience\r\npaper: [Multi-XScience: A Large-scale Dataset for Extreme Multi-document Summarization of Scientific Articles](https:\/\/arxiv.org\/abs\/2010.14235)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1003\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1003\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1002","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1002\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1002\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1002\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1002","id":755309758,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMDQ1MDIx","number":1002,"title":"Adding Medal: MeDAL: Medical Abbreviation Disambiguation Dataset for Natural Language Understanding 
Pretraining","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-02T14:13:17Z","updated_at":"2020-12-07T16:58:03Z","closed_at":"2020-12-03T13:14:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1002","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1002","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1002.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1002.patch","merged_at":"2020-12-03T13:14:33Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1002\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1002\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1001","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1001\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1001\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1001\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1001","id":755309071,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMDQ0NDQ0","number":1001,"title":"Adding Medal: MeDAL: Medical Abbreviation Disambiguation Dataset for Natural Language Understanding 
Pretraining","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T14:12:30Z","updated_at":"2020-12-02T14:13:12Z","closed_at":"2020-12-02T14:13:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1001","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1001","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1001.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1001.patch","merged_at":null},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1001\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1001\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1000","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1000\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1000\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1000\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1000","id":755292066,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMxMDMxMTE1","number":1000,"title":"UM005: Urdu <> English Translation 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T13:51:35Z","updated_at":"2020-12-04T15:34:30Z","closed_at":"2020-12-04T15:34:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1000","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1000","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1000.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1000.patch","merged_at":"2020-12-04T15:34:29Z"},"body":"Adds Urdu-English dataset for machine translation: http:\/\/ufal.ms.mff.cuni.cz\/umc\/005-en-ur\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1000\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1000\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/999","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/999\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/999\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/999\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/999","id":755246786,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwOTk1MTY3","number":999,"title":"add 
generated_reviews_enth","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T12:50:43Z","updated_at":"2020-12-03T11:17:28Z","closed_at":"2020-12-03T11:17:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/999","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/999","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/999.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/999.patch","merged_at":"2020-12-03T11:17:28Z"},"body":"`generated_reviews_enth` is created as part of [scb-mt-en-th-2020](https:\/\/arxiv.org\/pdf\/2007.03541.pdf) for machine translation task. This dataset (referred to as `generated_reviews_yn` in [scb-mt-en-th-2020](https:\/\/arxiv.org\/pdf\/2007.03541.pdf)) are English product reviews generated by [CTRL](https:\/\/arxiv.org\/abs\/1909.05858), translated by Google Translate API and annotated as accepted or rejected (`correct`) based on fluency and adequacy of the translation by human annotators. 
This allows it to be used for English-to-Thai translation quality esitmation (binary label), machine translation, and sentiment analysis.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/999\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/999\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/998","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/998\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/998\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/998\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/998","id":755235356,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwOTg2MTQ3","number":998,"title":"adding yahoo_answers_qa","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T12:33:54Z","updated_at":"2020-12-02T13:45:40Z","closed_at":"2020-12-02T13:26:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/998","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/998","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/998.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/998.patch","merged_at":"2020-12-02T13:26:06Z"},"body":"Adding Yahoo Answers QA dataset.\r\n\r\nMore info:\r\nhttps:\/\/ciir.cs.umass.edu\/downloads\/nfL6\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/998\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/998\/timeline","performed_via_github_app":null} 
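The generated_reviews_enth record (#999) just above describes English product reviews generated by CTRL, machine-translated to Thai, and annotated with an accepted/rejected `correct` label, which is what makes the corpus usable for translation quality estimation, machine translation, and sentiment analysis. A minimal loading sketch, assuming the dataset is published under the name `generated_reviews_enth` with a `train` split and a `correct` field matching the PR description (the exact dataset id, split names, and feature layout are assumptions, not confirmed by the record):

import datasets

# Assumed dataset id and split name; both follow the PR description, not a verified API.
ds = datasets.load_dataset("generated_reviews_enth", split="train")

example = ds[0]
# "correct" is the accepted/rejected annotation mentioned in the PR body (assumed field name).
print(example.get("correct"))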
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/997","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/997\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/997\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/997\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/997","id":755185517,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwOTQ2MTIy","number":997,"title":"Microsoft CodeXGlue","user":{"login":"madlag","id":272253,"node_id":"MDQ6VXNlcjI3MjI1Mw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/272253?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/madlag","html_url":"https:\/\/github.com\/madlag","followers_url":"https:\/\/api.github.com\/users\/madlag\/followers","following_url":"https:\/\/api.github.com\/users\/madlag\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/madlag\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/madlag\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/madlag\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/madlag\/orgs","repos_url":"https:\/\/api.github.com\/users\/madlag\/repos","events_url":"https:\/\/api.github.com\/users\/madlag\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/madlag\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-02T11:21:18Z","updated_at":"2021-06-08T13:42:25Z","closed_at":"2021-06-08T13:42:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/997","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/997","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/997.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/997.patch","merged_at":null},"body":"Datasets from https:\/\/github.com\/microsoft\/CodeXGLUE\r\n\r\nThis contains 13 datasets:\r\n\r\ncode_x_glue_cc_clone_detection_big_clone_bench\r\ncode_x_glue_cc_clone_detection_poj_104\r\ncode_x_glue_cc_cloze_testing_all\r\ncode_x_glue_cc_cloze_testing_maxmin\r\ncode_x_glue_cc_code_completion_line\r\ncode_x_glue_cc_code_completion_token\r\ncode_x_glue_cc_code_refinement\r\ncode_x_glue_cc_code_to_code_trans\r\ncode_x_glue_cc_defect_detection\r\ncode_x_glue_ct_code_to_text\r\ncode_x_glue_tc_nl_code_search_adv\r\ncode_x_glue_tc_text_to_code\r\ncode_x_glue_tt_text_to_text\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/997\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/997\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/996","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/996\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/996\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/996\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/996","id":755176084,"node_id":"MDU6SXNzdWU3NTUxNzYwODQ=","number":996,"title":"NotADirectoryError while loading the CNN\/Dailymail dataset","user":{"login":"arc-bu","id":75367920,"node_id":"MDQ6VXNlcjc1MzY3OTIw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75367920?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arc-bu","html_url":"https:\/\/github.com\/arc-bu","followers_url":"https:\/\/api.github.com\/users\/arc-bu\/followers","following_url":"https:\/\/api.github.com\/users\/arc-bu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arc-bu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arc-bu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arc-bu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arc-bu\/orgs","repos_url":"https:\/\/api.github.com\/users\/arc-bu\/repos","events_url":"https:\/\/api.github.com\/users\/arc-bu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arc-bu\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2020-12-02T11:07:56Z","updated_at":"2021-12-21T10:20:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"\r\nDownloading and preparing dataset cnn_dailymail\/3.0.0 (download: 558.32 MiB, generated: 1.28 GiB, post-processed: Unknown size, total: 1.82 GiB) to \/root\/.cache\/huggingface\/datasets\/cnn_dailymail\/3.0.0\/3.0.0\/0128610a44e10f25b4af6689441c72af86205282d26399642f7db38fa7535602...\r\n\r\n---------------------------------------------------------------------------\r\n\r\nNotADirectoryError Traceback (most recent call last)\r\n\r\n in ()\r\n 22 \r\n 23 \r\n---> 24 train = load_dataset('cnn_dailymail', '3.0.0', split='train')\r\n 25 validation = load_dataset('cnn_dailymail', '3.0.0', split='validation')\r\n 26 test = load_dataset('cnn_dailymail', '3.0.0', split='test')\r\n\r\n5 frames\r\n\r\n\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/cnn_dailymail\/0128610a44e10f25b4af6689441c72af86205282d26399642f7db38fa7535602\/cnn_dailymail.py in _find_files(dl_paths, publisher, url_dict)\r\n 132 else:\r\n 133 logging.fatal(\"Unsupported publisher: %s\", publisher)\r\n--> 134 files = sorted(os.listdir(top_dir))\r\n 135 \r\n 136 ret_files = []\r\n\r\nNotADirectoryError: [Errno 20] Not a directory: '\/root\/.cache\/huggingface\/datasets\/downloads\/1bc05d24fa6dda2468e83a73cf6dc207226e01e3c48a507ea716dc0421da583b\/cnn\/stories'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/996\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/996\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/995","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/995\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/995\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/995\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/995","id":755175199,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwOTM3NjI3","number":995,"title":"added dataset circa","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T11:06:39Z","updated_at":"2020-12-04T10:58:16Z","closed_at":"2020-12-03T09:39:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/995","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/995","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/995.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/995.patch","merged_at":"2020-12-03T09:39:37Z"},"body":"Dataset Circa added. 
Only README.md and dataset card left","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/995\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/995\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/994","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/994\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/994\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/994\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/994","id":755146834,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwOTE1MDc2","number":994,"title":"Add Sepedi ner corpus","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-02T10:30:07Z","updated_at":"2020-12-03T10:19:14Z","closed_at":"2020-12-02T18:20:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/994","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/994","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/994.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/994.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/994\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/994\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/993","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/993\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/993\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/993\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/993","id":755135768,"node_id":"MDU6SXNzdWU3NTUxMzU3Njg=","number":993,"title":"Problem downloading 
amazon_reviews_multi","user":{"login":"hfawaz","id":29229602,"node_id":"MDQ6VXNlcjI5MjI5NjAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29229602?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hfawaz","html_url":"https:\/\/github.com\/hfawaz","followers_url":"https:\/\/api.github.com\/users\/hfawaz\/followers","following_url":"https:\/\/api.github.com\/users\/hfawaz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hfawaz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hfawaz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hfawaz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hfawaz\/orgs","repos_url":"https:\/\/api.github.com\/users\/hfawaz\/repos","events_url":"https:\/\/api.github.com\/users\/hfawaz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hfawaz\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-02T10:15:57Z","updated_at":"2020-12-04T09:38:13Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Thanks for adding the dataset. \r\nAfter trying to load the dataset, I am getting the following error: \r\n`ConnectionError: Couldn't reach https:\/\/amazon-reviews-ml.s3-us-west-2.amazonaws.com\/json\/train\/dataset_fr_train.json\r\n`\r\nI used the following code to load the dataset: \r\n`load_dataset(\r\n dataset_name,\r\n \"all_languages\",\r\n cache_dir=\".data\"\r\n )`\r\n\r\nI am using version 1.1.3 of `datasets`\r\n\r\nNote that I can perform a successfull `wget https:\/\/amazon-reviews-ml.s3-us-west-2.amazonaws.com\/json\/train\/dataset_fr_train.json`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/993\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/993\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/992","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/992\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/992\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/992\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/992","id":755124963,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwODk3Njkx","number":992,"title":"Add CAIL 2018 
dataset","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T10:01:40Z","updated_at":"2020-12-02T16:49:02Z","closed_at":"2020-12-02T16:49:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/992","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/992","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/992.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/992.patch","merged_at":"2020-12-02T16:49:01Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/992\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/992\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/991","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/991\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/991\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/991\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/991","id":755117902,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwODkyMDk0","number":991,"title":"Adding farsi_news dataset 
(https:\/\/github.com\/sci2lab\/Farsi-datasets)","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T09:52:19Z","updated_at":"2020-12-03T11:01:26Z","closed_at":"2020-12-03T11:01:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/991","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/991","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/991.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/991.patch","merged_at":"2020-12-03T11:01:26Z"},"body":null,"reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/991\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/991\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/990","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/990\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/990\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/990\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/990","id":755097798,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwODc1NDYx","number":990,"title":"Add E2E 
NLG","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T09:25:12Z","updated_at":"2020-12-03T13:08:05Z","closed_at":"2020-12-03T13:08:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/990","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/990","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/990.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/990.patch","merged_at":"2020-12-03T13:08:04Z"},"body":"Adding the E2E NLG dataset.\r\n\r\nMore info here : http:\/\/www.macs.hw.ac.uk\/InteractionLab\/E2E\/\r\n\r\n### Checkbox\r\n\r\n- [x] Create the dataset script `\/datasets\/my_dataset\/my_dataset.py` using the template\r\n- [x] Fill the `_DESCRIPTION` and `_CITATION` variables\r\n- [x] Implement `_infos()`, `_split_generators()` and `_generate_examples()`\r\n- [x] Make sure that the `BUILDER_CONFIGS` class attribute is filled with the different configurations of the dataset and that the `BUILDER_CONFIG_CLASS` is specified if there is a custom config class.\r\n- [x] Generate the metadata file `dataset_infos.json` for all configurations\r\n- [x] Generate the dummy data `dummy_data.zip` files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [x] Add the dataset card `README.md` using the template and at least fill the tags \r\n- [x] Both tests for the real data and the dummy data pass.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/990\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/990\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/989","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/989\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/989\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/989\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/989","id":755079394,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwODYwNDMw","number":989,"title":"Fix SV -> 
NO","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T08:59:59Z","updated_at":"2020-12-02T09:18:21Z","closed_at":"2020-12-02T09:18:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/989","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/989","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/989.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/989.patch","merged_at":"2020-12-02T09:18:14Z"},"body":"This PR fixes the small typo as seen in #956 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/989\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/989\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/988","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/988\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/988\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/988\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/988","id":755069159,"node_id":"MDU6SXNzdWU3NTUwNjkxNTk=","number":988,"title":"making sure datasets are not loaded in memory and distributed training of 
them","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T08:45:15Z","updated_at":"2020-12-02T08:50:34Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am dealing with large-scale datasets which I need to train distributedly, I used the shard function to divide the dataset across the cores, without any sampler, this does not work for distributed training and does not become any faster than 1 TPU core. 1) how I can make sure data is not loaded in memory 2) in case of distributed training with iterative datasets which measures needs to be taken? Is this all sharding the data only. I was wondering if there can be possibility for me to discuss this with someone with distributed training with iterative datasets using dataset library. 
thanks ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/988\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/988\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/987","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/987\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/987\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/987\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/987","id":755059469,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwODQ0MTQ4","number":987,"title":"Add OPUS DOGC dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-02T08:30:32Z","updated_at":"2020-12-04T13:27:41Z","closed_at":"2020-12-04T13:27:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/987","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/987","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/987.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/987.patch","merged_at":"2020-12-04T13:27:41Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/987\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/987\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/986","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/986\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/986\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/986\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/986","id":755047470,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwODM0MzYx","number":986,"title":"Add SciTLDR 
Dataset","user":{"login":"Bharat123rox","id":13381361,"node_id":"MDQ6VXNlcjEzMzgxMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13381361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Bharat123rox","html_url":"https:\/\/github.com\/Bharat123rox","followers_url":"https:\/\/api.github.com\/users\/Bharat123rox\/followers","following_url":"https:\/\/api.github.com\/users\/Bharat123rox\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Bharat123rox\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Bharat123rox\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Bharat123rox\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Bharat123rox\/orgs","repos_url":"https:\/\/api.github.com\/users\/Bharat123rox\/repos","events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-02T08:11:16Z","updated_at":"2020-12-02T18:37:22Z","closed_at":"2020-12-02T18:02:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/986","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/986","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/986.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/986.patch","merged_at":null},"body":"Adds the SciTLDR Dataset by AI2\r\nAdded README card with tags to the best of my knowledge\r\n\r\nMulti-target summaries or TLDRs of Scientific Documents","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/986\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/986\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/985","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/985\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/985\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/985\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/985","id":755020564,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwODEyNTM1","number":985,"title":"Add GAP 
dataset","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-02T07:25:11Z","updated_at":"2020-12-02T16:16:32Z","closed_at":"2020-12-02T16:16:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/985","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/985","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/985.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/985.patch","merged_at":null},"body":"GAP dataset\r\nGender bias coreference resolution","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/985\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/985\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/984","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/984\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/984\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/984\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/984","id":755009916,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwODAzNzgw","number":984,"title":"committing Whoa 
file","user":{"login":"StulosDunamos","id":75356780,"node_id":"MDQ6VXNlcjc1MzU2Nzgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75356780?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/StulosDunamos","html_url":"https:\/\/github.com\/StulosDunamos","followers_url":"https:\/\/api.github.com\/users\/StulosDunamos\/followers","following_url":"https:\/\/api.github.com\/users\/StulosDunamos\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/StulosDunamos\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/StulosDunamos\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/StulosDunamos\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/StulosDunamos\/orgs","repos_url":"https:\/\/api.github.com\/users\/StulosDunamos\/repos","events_url":"https:\/\/api.github.com\/users\/StulosDunamos\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/StulosDunamos\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-02T07:07:46Z","updated_at":"2020-12-02T16:15:29Z","closed_at":"2020-12-02T15:40:58Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/984","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/984","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/984.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/984.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/984\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/984\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/983","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/983\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/983\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/983\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/983","id":754966620,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNzY4MTMw","number":983,"title":"add mc 
taco","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T05:54:55Z","updated_at":"2020-12-02T15:37:47Z","closed_at":"2020-12-02T15:37:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/983","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/983","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/983.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/983.patch","merged_at":"2020-12-02T15:37:46Z"},"body":"MC-TACO\r\nTemporal commonsense knowledge","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/983\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/983\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/982","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/982\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/982\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/982\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/982","id":754946337,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNzUxMzYx","number":982,"title":"add prachathai67k 
take2","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T05:12:01Z","updated_at":"2020-12-02T10:18:11Z","closed_at":"2020-12-02T10:18:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/982","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/982","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/982.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/982.patch","merged_at":"2020-12-02T10:18:11Z"},"body":"I decided it will be faster to create a new pull request instead of fixing the rebase issues.\r\ncontinuing from https:\/\/github.com\/huggingface\/datasets\/pull\/954\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/982\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/982\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/981","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/981\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/981\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/981\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/981","id":754937612,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNzQ0MTYx","number":981,"title":"add wisesight_sentiment 
take2","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T04:50:59Z","updated_at":"2020-12-02T10:37:13Z","closed_at":"2020-12-02T10:37:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/981","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/981","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/981.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/981.patch","merged_at":"2020-12-02T10:37:13Z"},"body":"Take 2 since last time the rebase issues were taking me too much time to fix as opposed to just open a new one.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/981\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/981\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/980","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/980\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/980\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/980\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/980","id":754899301,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNzEzNjY3","number":980,"title":"Wongnai - Thai reviews 
dataset","user":{"login":"mapmeld","id":643918,"node_id":"MDQ6VXNlcjY0MzkxOA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/643918?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mapmeld","html_url":"https:\/\/github.com\/mapmeld","followers_url":"https:\/\/api.github.com\/users\/mapmeld\/followers","following_url":"https:\/\/api.github.com\/users\/mapmeld\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mapmeld\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mapmeld\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mapmeld\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mapmeld\/orgs","repos_url":"https:\/\/api.github.com\/users\/mapmeld\/repos","events_url":"https:\/\/api.github.com\/users\/mapmeld\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mapmeld\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-02T03:20:08Z","updated_at":"2020-12-02T15:34:41Z","closed_at":"2020-12-02T15:30:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/980","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/980","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/980.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/980.patch","merged_at":"2020-12-02T15:30:04Z"},"body":"40,000 reviews, previously released on GitHub ( https:\/\/github.com\/wongnai\/wongnai-corpus ) with an LGPL license, and on a closed Kaggle competition ( https:\/\/www.kaggle.com\/c\/wongnai-challenge-review-rating-prediction\/ )","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/980\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/980\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/979","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/979\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/979\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/979\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/979","id":754893337,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNzA4OTA5","number":979,"title":"[WIP] Add multi 
woz","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T03:05:42Z","updated_at":"2020-12-02T16:07:16Z","closed_at":"2020-12-02T16:07:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/979","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/979","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/979.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/979.patch","merged_at":"2020-12-02T16:07:16Z"},"body":"This PR adds version 2.2 of the Multi-domain Wizard of OZ dataset: https:\/\/github.com\/budzianowski\/multiwoz\/tree\/master\/data\/MultiWOZ_2.2\r\n\r\nIt was a pretty big chunk of work to figure out the structure, so I stil have tol add the description to the README.md\r\n\r\nOn the plus side the structure is broadly similar to that of the Google Schema Guided dialogue [dataset](https:\/\/github.com\/google-research-datasets\/dstc8-schema-guided-dialogue), so will take care of that one next.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/979\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/979\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/978","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/978\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/978\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/978\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/978","id":754854478,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNjc4NTUy","number":978,"title":"Add code 
refinement","user":{"login":"reshinthadithyan","id":36307201,"node_id":"MDQ6VXNlcjM2MzA3MjAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36307201?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/reshinthadithyan","html_url":"https:\/\/github.com\/reshinthadithyan","followers_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/followers","following_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/repos","events_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/reshinthadithyan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-12-02T01:29:58Z","updated_at":"2020-12-07T01:52:58Z","closed_at":"2020-12-07T01:52:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/978","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/978","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/978.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/978.patch","merged_at":null},"body":"### OVERVIEW\r\nMillions of open-source projects with numerous bug fixes\r\nare available in code repositories. 
This proliferation\r\nof software development histories can be leveraged to\r\nlearn how to fix common programming bugs\r\nCode refinement aims to automatically fix bugs in the code,\r\nwhich can contribute to reducing the cost of bug-fixes for developers.\r\nGiven a piece of Java code with bugs,\r\nthe task is to remove the bugs to output the refined code.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/978\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/978\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/977","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/977\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/977\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/977\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/977","id":754839594,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNjY2ODg3","number":977,"title":"Add ROPES dataset","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T00:52:10Z","updated_at":"2020-12-02T10:58:36Z","closed_at":"2020-12-02T10:58:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/977","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/977","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/977.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/977.patch","merged_at":"2020-12-02T10:58:35Z"},"body":"ROPES dataset \r\nReasoning over paragraph effects in situations - testing a system's ability to apply knowledge from a passage of text to a new situation. 
The task is framed into a reading comprehension task following squad-style extractive qa.\r\n\r\nOne thing to note: labels of the test set are hidden (leaderboard submission) so I encoded that as an empty list (ropes.py:L125)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/977\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/977\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/976","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/976\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/976\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/976\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/976","id":754826146,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNjU1NzM5","number":976,"title":"Arabic pos dialect","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-02T00:21:13Z","updated_at":"2020-12-09T17:30:32Z","closed_at":"2020-12-09T17:30:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/976","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/976","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/976.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/976.patch","merged_at":null},"body":"A README.md and loading script for the Arabic POS Dialect dataset. The README is missing the sections on personal information, biases, and limitations, as it would probably be better for those to be filled by someone who can read the contents of the dataset and is familiar with Arabic NLP. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/976\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/976\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/975","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/975\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/975\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/975\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/975","id":754823701,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNjUzNjg4","number":975,"title":"add MeTooMA dataset","user":{"login":"akash418","id":23264033,"node_id":"MDQ6VXNlcjIzMjY0MDMz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23264033?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/akash418","html_url":"https:\/\/github.com\/akash418","followers_url":"https:\/\/api.github.com\/users\/akash418\/followers","following_url":"https:\/\/api.github.com\/users\/akash418\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/akash418\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/akash418\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/akash418\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/akash418\/orgs","repos_url":"https:\/\/api.github.com\/users\/akash418\/repos","events_url":"https:\/\/api.github.com\/users\/akash418\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/akash418\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-02T00:15:55Z","updated_at":"2020-12-02T10:58:56Z","closed_at":"2020-12-02T10:58:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/975","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/975","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/975.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/975.patch","merged_at":"2020-12-02T10:58:55Z"},"body":"This PR adds the #MeToo MA dataset. It presents multi-label data points for tweets mined in the backdrop of the #MeToo movement. The dataset includes data points in the form of Tweet ids and appropriate labels. 
Please refer to the accompanying paper for detailed information regarding annotation, collection, and guidelines.\r\n\r\nPaper: https:\/\/ojs.aaai.org\/index.php\/ICWSM\/article\/view\/7292\r\nDataset Link: https:\/\/dataverse.harvard.edu\/dataset.xhtml?persistentId=doi:10.7910\/DVN\/JN4EYU\r\n\r\n\r\n---\r\nannotations_creators:\r\n- expert-generated\r\nlanguage_creators:\r\n- found\r\nlanguages:\r\n- en\r\nmultilinguality:\r\n- monolingual\r\nsize_categories:\r\n- 1K https:\/\/rowanzellers.com\/swag\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/970\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/970\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/969","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/969\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/969\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/969\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/969","id":754681940,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNTM4ODQz","number":969,"title":"Add wiki auto dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T19:58:11Z","updated_at":"2020-12-02T16:19:14Z","closed_at":"2020-12-02T16:19:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/969","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/969","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/969.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/969.patch","merged_at":"2020-12-02T16:19:14Z"},"body":"This PR adds the WikiAuto sentence simplification dataset\r\n\r\nhttps:\/\/github.com\/chaojiang06\/wiki-auto\r\n\r\nThis is also a prospective GEM task, hence the README.md","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/969\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/969\/timeline","performed_via_github_app":null} 
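Several of the records above raise questions about how these datasets behave once loaded, most directly issue 988 on keeping data out of memory during distributed training. A minimal sketch, assuming a recent version of the `datasets` library; the dataset name, config name, and shard counts below are placeholders chosen for illustration and are not taken from any of these threads:

```python
# Hedged sketch, not an excerpt from any issue above: load_dataset memory-maps
# the dataset's Arrow files instead of reading them into RAM, and .shard() gives
# each training process its own contiguous slice of the data.
from datasets import load_dataset

ds = load_dataset("wiki_auto", "manual", split="train")  # config name assumed for illustration

rank, world_size = 0, 8  # placeholders; a distributed launcher would supply these
shard = ds.shard(num_shards=world_size, index=rank, contiguous=True)
print(len(ds), len(shard))
```

Shuffling such a dataset with `.shuffle(seed=...)` only permutes an index mapping over the memory-mapped table, so it does not copy the underlying data into memory either.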
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/968","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/968\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/968\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/968\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/968","id":754659015,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNTIwMjEz","number":968,"title":"ADD Afrikaans NER","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-01T19:23:03Z","updated_at":"2020-12-02T09:41:28Z","closed_at":"2020-12-02T09:41:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/968","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/968","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/968.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/968.patch","merged_at":"2020-12-02T09:41:28Z"},"body":"Afrikaans NER corpus","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/968\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/968\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/967","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/967\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/967\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/967\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/967","id":754578988,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNDU0OTI3","number":967,"title":"Add CS Restaurants 
dataset","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-01T17:17:37Z","updated_at":"2020-12-02T17:57:44Z","closed_at":"2020-12-02T17:57:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/967","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/967","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/967.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/967.patch","merged_at":null},"body":"This PR adds the Czech restaurants dataset for Czech NLG.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/967\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/967\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/966","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/966\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/966\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/966\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/966","id":754558686,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNDM4NDE4","number":966,"title":"Add CLINC150 
Dataset","user":{"login":"sumanthd17","id":28291870,"node_id":"MDQ6VXNlcjI4MjkxODcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28291870?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sumanthd17","html_url":"https:\/\/github.com\/sumanthd17","followers_url":"https:\/\/api.github.com\/users\/sumanthd17\/followers","following_url":"https:\/\/api.github.com\/users\/sumanthd17\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sumanthd17\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sumanthd17\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sumanthd17\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sumanthd17\/orgs","repos_url":"https:\/\/api.github.com\/users\/sumanthd17\/repos","events_url":"https:\/\/api.github.com\/users\/sumanthd17\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sumanthd17\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-12-01T16:50:13Z","updated_at":"2020-12-02T18:45:43Z","closed_at":"2020-12-02T18:45:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/966","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/966","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/966.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/966.patch","merged_at":null},"body":"Added CLINC150 Dataset. The link to the dataset can be found [here](https:\/\/github.com\/clinc\/oos-eval) and the paper can be found [here](https:\/\/www.aclweb.org\/anthology\/D19-1131.pdf)\r\n\r\n- [x] Followed the instructions in CONTRIBUTING.md\r\n- [x] Ran the tests successfully\r\n- [x] Created the dummy data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/966\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/966\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/965","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/965\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/965\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/965\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/965","id":754553169,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwNDMzODQ2","number":965,"title":"Add CLINC150 
Dataset","user":{"login":"sumanthd17","id":28291870,"node_id":"MDQ6VXNlcjI4MjkxODcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28291870?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sumanthd17","html_url":"https:\/\/github.com\/sumanthd17","followers_url":"https:\/\/api.github.com\/users\/sumanthd17\/followers","following_url":"https:\/\/api.github.com\/users\/sumanthd17\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sumanthd17\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sumanthd17\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sumanthd17\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sumanthd17\/orgs","repos_url":"https:\/\/api.github.com\/users\/sumanthd17\/repos","events_url":"https:\/\/api.github.com\/users\/sumanthd17\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sumanthd17\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T16:43:00Z","updated_at":"2020-12-01T16:51:16Z","closed_at":"2020-12-01T16:49:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/965","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/965","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/965.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/965.patch","merged_at":null},"body":"Added CLINC150 Dataset. The link to the dataset can be found [here](https:\/\/github.com\/clinc\/oos-eval) and the paper can be found [here](https:\/\/www.aclweb.org\/anthology\/D19-1131.pdf)\r\n\r\n- [x] Followed the instructions in CONTRIBUTING.md\r\n- [x] Ran the tests successfully\r\n- [x] Created the dummy data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/965\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/965\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/964","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/964\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/964\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/964\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/964","id":754474660,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMzY4OTAy","number":964,"title":"Adding the WebNLG 
dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-01T15:05:23Z","updated_at":"2020-12-02T17:34:05Z","closed_at":"2020-12-02T17:34:05Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/964","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/964","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/964.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/964.patch","merged_at":"2020-12-02T17:34:05Z"},"body":"This PR adds data from the WebNLG challenge, with one configuration per release and challenge iteration.\r\n\r\nMore information can be found [here](https:\/\/webnlg-challenge.loria.fr\/)\r\n\r\nUnfortunately, the data itself comes from a pretty large number of small XML files, so the dummy data ends up being quite large (8.4 MB even keeping only one example per file).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/964\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/964\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/963","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/963\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/963\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/963\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/963","id":754451234,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMzQ5NjQ4","number":963,"title":"add CODAH 
dataset","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T14:37:05Z","updated_at":"2020-12-02T13:45:58Z","closed_at":"2020-12-02T13:21:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/963","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/963","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/963.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/963.patch","merged_at":"2020-12-02T13:21:25Z"},"body":"Adding CODAH dataset.\r\n\r\nMore info:\r\nhttps:\/\/github.com\/Websail-NU\/CODAH","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/963\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/963\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/962","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/962\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/962\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/962\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/962","id":754441428,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMzQxMDA2","number":962,"title":"Add Danish Political Comments 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T14:28:32Z","updated_at":"2020-12-03T10:31:55Z","closed_at":"2020-12-03T10:31:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/962","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/962","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/962.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/962.patch","merged_at":"2020-12-03T10:31:54Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/962\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/962\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/961","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/961\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/961\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/961\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/961","id":754434398,"node_id":"MDU6SXNzdWU3NTQ0MzQzOTg=","number":961,"title":"sample multiple datasets 
","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-01T14:20:02Z","updated_at":"2020-12-02T01:32:44Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am dealing with multiple datasets, I need to have a dataloader over them with a condition that in each batch data samples are coming from one of the datasets. My main question is: \r\n- I need to have a way to sample the datasets first with some weights, lets say 2x dataset1 1x dataset2, could you point me how I can do it\r\n\r\nsub-questions:\r\n- I want to concat sampled datasets and define one dataloader on it, then I need a way to make sure batches come from 1 dataset in each iteration, could you assist me how I can do?\r\n- I use iterative-type of datasets, but I need a method of shuffling still since it brings accuracy performance issues if not doing it, thanks for the help. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/961\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/961\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/960","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/960\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/960\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/960\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/960","id":754422710,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMzI1MzUx","number":960,"title":"Add code to automate parts of the dataset card","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T14:04:51Z","updated_at":"2021-04-26T07:56:01Z","closed_at":"2021-04-26T07:56:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/960","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/960","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/960.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/960.patch","merged_at":null},"body":"Most parts of the \"Dataset Structure\" section can be generated automatically. 
This PR adds some code to do so.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/960\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/960\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/959","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/959\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/959\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/959\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/959","id":754418610,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMzIxOTM1","number":959,"title":"Add Tunizi Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T13:59:39Z","updated_at":"2020-12-03T14:21:41Z","closed_at":"2020-12-03T14:21:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/959","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/959","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/959.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/959.patch","merged_at":"2020-12-03T14:21:40Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/959\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/959\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/958","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/958\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/958\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/958\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/958","id":754404095,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMzA5ODkz","number":958,"title":"dataset(ncslgr): add initial loading script","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-01T13:41:17Z","updated_at":"2020-12-07T16:35:39Z","closed_at":"2020-12-07T16:35:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/958","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/958","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/958.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/958.patch","merged_at":"2020-12-07T16:35:39Z"},"body":"clean #789","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/958\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/958\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/957","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/957\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/957\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/957\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/957","id":754380073,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjg5OTk4","number":957,"title":"Isixhosa ner 
corpus","user":{"login":"yvonnegitau","id":7923902,"node_id":"MDQ6VXNlcjc5MjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7923902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yvonnegitau","html_url":"https:\/\/github.com\/yvonnegitau","followers_url":"https:\/\/api.github.com\/users\/yvonnegitau\/followers","following_url":"https:\/\/api.github.com\/users\/yvonnegitau\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yvonnegitau\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yvonnegitau\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yvonnegitau\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yvonnegitau\/orgs","repos_url":"https:\/\/api.github.com\/users\/yvonnegitau\/repos","events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yvonnegitau\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T13:08:36Z","updated_at":"2020-12-01T18:14:58Z","closed_at":"2020-12-01T18:14:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/957","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/957","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/957.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/957.patch","merged_at":"2020-12-01T18:14:58Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/957\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/957\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/956","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/956\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/956\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/956\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/956","id":754368378,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjgwMzU1","number":956,"title":"Add Norwegian 
NER","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-01T12:51:02Z","updated_at":"2020-12-02T08:53:11Z","closed_at":"2020-12-01T18:09:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/956","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/956","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/956.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/956.patch","merged_at":"2020-12-01T18:09:21Z"},"body":"This PR adds the [Norwegian NER](https:\/\/github.com\/ljos\/navnkjenner) dataset.\r\n\r\nI have added the `conllu` package as a test dependency. This is required to properly parse the `.conllu` files.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/956\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/956\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/955","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/955\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/955\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/955\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/955","id":754367291,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjc5NDQw","number":955,"title":"Added PragmEval 
benchmark","user":{"login":"sileod","id":9168444,"node_id":"MDQ6VXNlcjkxNjg0NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9168444?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sileod","html_url":"https:\/\/github.com\/sileod","followers_url":"https:\/\/api.github.com\/users\/sileod\/followers","following_url":"https:\/\/api.github.com\/users\/sileod\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sileod\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sileod\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sileod\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sileod\/orgs","repos_url":"https:\/\/api.github.com\/users\/sileod\/repos","events_url":"https:\/\/api.github.com\/users\/sileod\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sileod\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-12-01T12:49:15Z","updated_at":"2020-12-04T10:43:32Z","closed_at":"2020-12-03T09:36:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/955","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/955","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/955.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/955.patch","merged_at":"2020-12-03T09:36:47Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/955\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/955\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/954","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/954\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/954\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/954\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/954","id":754362012,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjc1MDY4","number":954,"title":"add 
prachathai67k","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-01T12:40:55Z","updated_at":"2020-12-02T05:12:11Z","closed_at":"2020-12-02T04:43:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/954","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/954","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/954.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/954.patch","merged_at":null},"body":"`prachathai-67k`: News Article Corpus and Multi-label Text Classificdation from Prachathai.com\r\nThe prachathai-67k dataset was scraped from the news site Prachathai.\r\nWe filtered out those articles with less than 500 characters of body text, mostly images and cartoons.\r\nIt contains 67,889 articles wtih 12 curated tags from August 24, 2004 to November 15, 2018.\r\nThe dataset was originally scraped by @lukkiddd and cleaned by @cstorm125.\r\nYou can also see preliminary exploration at https:\/\/github.com\/PyThaiNLP\/prachathai-67k\/blob\/master\/exploration.ipynb","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/954\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/954\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/953","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/953\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/953\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/953\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/953","id":754359942,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjczMzg5","number":953,"title":"added health_fact dataset 
","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-01T12:37:44Z","updated_at":"2020-12-01T23:11:33Z","closed_at":"2020-12-01T23:11:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/953","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/953","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/953.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/953.patch","merged_at":"2020-12-01T23:11:33Z"},"body":"Added dataset Explainable Fact-Checking for Public Health Claims (dataset_id: health_fact)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/953\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/953\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/952","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/952\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/952\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/952\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/952","id":754357270,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjcxMTQz","number":952,"title":"Add orange 
sum","user":{"login":"moussaKam","id":28675016,"node_id":"MDQ6VXNlcjI4Njc1MDE2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28675016?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/moussaKam","html_url":"https:\/\/github.com\/moussaKam","followers_url":"https:\/\/api.github.com\/users\/moussaKam\/followers","following_url":"https:\/\/api.github.com\/users\/moussaKam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/moussaKam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/moussaKam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/moussaKam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/moussaKam\/orgs","repos_url":"https:\/\/api.github.com\/users\/moussaKam\/repos","events_url":"https:\/\/api.github.com\/users\/moussaKam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/moussaKam\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T12:33:34Z","updated_at":"2020-12-01T15:44:00Z","closed_at":"2020-12-01T15:44:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/952","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/952","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/952.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/952.patch","merged_at":"2020-12-01T15:44:00Z"},"body":"Add OrangeSum a french abstractive summarization dataset. \r\n\r\nPaper: [BARThez: a Skilled Pretrained French Sequence-to-Sequence Model](https:\/\/arxiv.org\/abs\/2010.12321)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/952\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/952\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/951","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/951\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/951\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/951\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/951","id":754349979,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjY1MTY0","number":951,"title":"Prachathai67k","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-01T12:21:52Z","updated_at":"2020-12-01T12:29:53Z","closed_at":"2020-12-01T12:28:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/951","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/951","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/951.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/951.patch","merged_at":null},"body":"Add `prachathai-67k`: News Article Corpus and Multi-label Text Classificdation from Prachathai.com\r\n\r\nThe `prachathai-67k` dataset was scraped from the news site [Prachathai](prachathai.com). We filtered out those articles with less than 500 characters of body text, mostly images and cartoons. It contains 67,889 articles wtih 12 curated tags from August 24, 2004 to November 15, 2018. The dataset was originally scraped by [@lukkiddd](https:\/\/github.com\/lukkiddd) and cleaned by [@cstorm125](https:\/\/github.com\/cstorm125). Download the dataset [here](https:\/\/www.dropbox.com\/s\/fsxepdka4l2pr45\/prachathai-67k.zip?dl=1). You can also see preliminary exploration in [exploration.ipynb](https:\/\/github.com\/PyThaiNLP\/prachathai-67k\/blob\/master\/exploration.ipynb).\r\n\r\nThis dataset is a part of [pyThaiNLP](https:\/\/github.com\/PyThaiNLP\/) Thai text [classification-benchmarks](https:\/\/github.com\/PyThaiNLP\/classification-benchmarks). 
For the benchmark, we selected the following tags with substantial volume that resemble **classifying types of articles**:\r\n\r\n* `\u0e01\u0e32\u0e23\u0e40\u0e21\u0e37\u0e2d\u0e07` - politics\r\n* `\u0e2a\u0e34\u0e17\u0e18\u0e34\u0e21\u0e19\u0e38\u0e29\u0e22\u0e0a\u0e19` - human_rights\r\n* `\u0e04\u0e38\u0e13\u0e20\u0e32\u0e1e\u0e0a\u0e35\u0e27\u0e34\u0e15` - quality_of_life\r\n* `\u0e15\u0e48\u0e32\u0e07\u0e1b\u0e23\u0e30\u0e40\u0e17\u0e28` - international\r\n* `\u0e2a\u0e31\u0e07\u0e04\u0e21` - social\r\n* `\u0e2a\u0e34\u0e48\u0e07\u0e41\u0e27\u0e14\u0e25\u0e49\u0e2d\u0e21` - environment\r\n* `\u0e40\u0e28\u0e23\u0e29\u0e10\u0e01\u0e34\u0e08` - economics\r\n* `\u0e27\u0e31\u0e12\u0e19\u0e18\u0e23\u0e23\u0e21` - culture\r\n* `\u0e41\u0e23\u0e07\u0e07\u0e32\u0e19` - labor\r\n* `\u0e04\u0e27\u0e32\u0e21\u0e21\u0e31\u0e48\u0e19\u0e04\u0e07` - national_security\r\n* `\u0e44\u0e2d\u0e0b\u0e35\u0e17\u0e35` - ict\r\n* `\u0e01\u0e32\u0e23\u0e28\u0e36\u0e01\u0e29\u0e32` - education","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/951\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/951\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/950","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/950\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/950\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/950\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/950","id":754318686,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjM4OTQx","number":950,"title":"Support .xz file 
format","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T11:34:48Z","updated_at":"2020-12-01T13:39:18Z","closed_at":"2020-12-01T13:39:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/950","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/950","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/950.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/950.patch","merged_at":"2020-12-01T13:39:18Z"},"body":"Add support to extract\/uncompress files in .xz format.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/950\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/950\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/949","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/949\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/949\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/949\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/949","id":754317777,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjM4MTky","number":949,"title":"Add GermaNER 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-01T11:33:31Z","updated_at":"2020-12-03T14:06:41Z","closed_at":"2020-12-03T14:06:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/949","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/949","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/949.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/949.patch","merged_at":"2020-12-03T14:06:40Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/949\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/949\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/948","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/948\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/948\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/948\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/948","id":754306260,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjI4NjQz","number":948,"title":"docs(ADD_NEW_DATASET): correct indentation for 
script","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T11:17:38Z","updated_at":"2020-12-01T11:25:18Z","closed_at":"2020-12-01T11:25:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/948","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/948","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/948.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/948.patch","merged_at":"2020-12-01T11:25:18Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/948\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/948\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/947","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/947\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/947\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/947\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/947","id":754286658,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjEyMjc3","number":947,"title":"Add europeana 
newspapers","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T10:52:18Z","updated_at":"2020-12-02T09:42:35Z","closed_at":"2020-12-02T09:42:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/947","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/947","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/947.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/947.patch","merged_at":"2020-12-02T09:42:09Z"},"body":"This PR adds the [Europeana newspapers](https:\/\/github.com\/EuropeanaNewspapers\/ner-corpora) dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/947\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/947\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/946","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/946\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/946\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/946\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/946","id":754278632,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjA1Nzgw","number":946,"title":"add PEC 
dataset","user":{"login":"zhongpeixiang","id":11826803,"node_id":"MDQ6VXNlcjExODI2ODAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11826803?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zhongpeixiang","html_url":"https:\/\/github.com\/zhongpeixiang","followers_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/followers","following_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/orgs","repos_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/repos","events_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zhongpeixiang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-12-01T10:41:41Z","updated_at":"2020-12-03T02:47:14Z","closed_at":"2020-12-03T02:47:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/946","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/946","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/946.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/946.patch","merged_at":null},"body":"A persona-based empathetic conversation dataset published at EMNLP 2020.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/946\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/946\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/945","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/945\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/945\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/945\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/945","id":754273920,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMjAyMDM1","number":945,"title":"Adding Babi dataset - English 
version","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-01T10:35:36Z","updated_at":"2020-12-04T15:43:05Z","closed_at":"2020-12-04T15:42:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/945","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/945","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/945.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/945.patch","merged_at":null},"body":"Adding the English version of bAbI.\r\n\r\nSamples are taken from ParlAI for consistency with the main users at the moment.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/945\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/945\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/944","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/944\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/944\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/944\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/944","id":754228947,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMTY0NTU5","number":944,"title":"Add German Legal Entity Recognition 
Dataset","user":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-01T09:38:22Z","updated_at":"2020-12-03T13:06:56Z","closed_at":"2020-12-03T13:06:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/944","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/944","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/944.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/944.patch","merged_at":"2020-12-03T13:06:54Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/944\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/944\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/943","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/943\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/943\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/943\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/943","id":754192491,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMTM2ODM3","number":943,"title":"The FLUE 
Benchmark","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T09:00:50Z","updated_at":"2020-12-01T15:24:38Z","closed_at":"2020-12-01T15:24:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/943","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/943","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/943.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/943.patch","merged_at":"2020-12-01T15:24:30Z"},"body":"This PR adds the [FLUE](https:\/\/github.com\/getalp\/Flaubert\/tree\/master\/flue) benchmark which is a set of different datasets to evaluate models for French content.\r\n\r\nTwo datasets are missing, the French Treebank that we can use only for research purpose and we are not allowed to distribute, and the Word Sense disambiguation for Nouns that will be added later.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/943\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/943\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/942","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/942\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/942\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/942\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/942","id":754162318,"node_id":"MDU6SXNzdWU3NTQxNjIzMTg=","number":942,"title":"D","user":{"login":"CryptoMiKKi","id":74238514,"node_id":"MDQ6VXNlcjc0MjM4NTE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/74238514?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/CryptoMiKKi","html_url":"https:\/\/github.com\/CryptoMiKKi","followers_url":"https:\/\/api.github.com\/users\/CryptoMiKKi\/followers","following_url":"https:\/\/api.github.com\/users\/CryptoMiKKi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/CryptoMiKKi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/CryptoMiKKi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/CryptoMiKKi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/CryptoMiKKi\/orgs","repos_url":"https:\/\/api.github.com\/users\/CryptoMiKKi\/repos","events_url":"https:\/\/api.github.com\/users\/CryptoMiKKi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/CryptoMiKKi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-12-01T08:17:10Z","updated_at":"2020-12-03T16:42:53Z","closed_at":"2020-12-03T16:42:53Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/942\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/942\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/941","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/941\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/941\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/941\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/941","id":754141321,"node_id":"MDExOlB1bGxSZXF1ZXN0NTMwMDk0MTI2","number":941,"title":"Add People's Daily NER 
dataset","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-01T07:48:53Z","updated_at":"2020-12-02T18:42:43Z","closed_at":"2020-12-02T18:42:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/941","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/941","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/941.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/941.patch","merged_at":"2020-12-02T18:42:41Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/941\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/941\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/940","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/940\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/940\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/940\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/940","id":754010753,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI5OTc3OTQ2","number":940,"title":"Add MSRA NER 
dataset","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-12-01T05:02:11Z","updated_at":"2020-12-04T09:29:40Z","closed_at":"2020-12-01T07:25:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/940","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/940","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/940.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/940.patch","merged_at":"2020-12-01T07:25:53Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/940\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/940\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/939","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/939\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/939\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/939\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/939","id":753965405,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI5OTQwOTYz","number":939,"title":"add 
wisesight_sentiment","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-12-01T03:06:39Z","updated_at":"2020-12-02T04:52:38Z","closed_at":"2020-12-02T04:35:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/939","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/939","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/939.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/939.patch","merged_at":null},"body":"Add `wisesight_sentiment` Social media messages in Thai language with sentiment label (positive, neutral, negative, question)\r\n\r\nModel Card:\r\n---\r\nYAML tags:\r\nannotations_creators:\r\n- expert-generated\r\nlanguage_creators:\r\n- found\r\nlanguages:\r\n- th\r\nlicenses:\r\n- cc0-1.0\r\nmultilinguality:\r\n- monolingual\r\nsize_categories:\r\n- 10K Paper Link = https:\/\/arxiv.org\/pdf\/1802.08979.pdf \r\n> Github Link = https:\/\/github.com\/TellinaTool\/nl2bash\r\n\r\n\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/901\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/901\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/900","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/900\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/900\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/900\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/900","id":752214066,"node_id":"MDU6SXNzdWU3NTIyMTQwNjY=","number":900,"title":"datasets.load_dataset() custom chaching directory 
bug","user":{"login":"SapirWeissbuch","id":44585792,"node_id":"MDQ6VXNlcjQ0NTg1Nzky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44585792?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SapirWeissbuch","html_url":"https:\/\/github.com\/SapirWeissbuch","followers_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/followers","following_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/orgs","repos_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/repos","events_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2020-11-27T12:18:53Z","updated_at":"2020-11-29T22:48:53Z","closed_at":"2020-11-29T22:48:53Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello,\r\nI'm having issue with loading a dataset with a custom `cache_dir`. 
Despite specifying the output dir, it is still downloaded to \r\n `~\/.cache`.\r\n\r\n## Environment info\r\n- `datasets` version: 1.1.3\r\n- Platform: Linux-4.19.129-aufs-1-x86_64-with-debian-10.1\r\n- Python version: 3.7.3\r\n\r\n## The code I'm running:\r\n```python\r\nimport datasets\r\nfrom pathlib import Path\r\n\r\nvalidation_dataset = datasets.load_dataset(\"natural_questions\", split=\"validation[:5%]\", cache_dir=Path(\".\/data\")) \r\n```\r\n\r\n## The output:\r\n\r\n* The dataset is downloaded to my home directory's `.cache` \r\n* A new empty directory named \"`natural_questions` is created in the specified directory `.data`\r\n* `tree data` in the shell outputs:\r\n```\r\ndata\r\n\u2514\u2500\u2500 natural_questions\r\n \u2514\u2500\u2500 default\r\n \u2514\u2500\u2500 0.0.2\r\n3 directories, 0 files\r\n```\r\n\r\nThe output:\r\n```\r\nDownloading: 8.61kB [00:00, 5.11MB\/s] \r\nDownloading: 13.6kB [00:00, 7.89MB\/s] \r\nUsing custom data configuration default \r\nDownloading and preparing dataset natural_questions\/default (download: 41.97 GiB, generated: 92.95 GiB, post-processed: Unknown size, total: 134.92 GiB) to .\/data\/natural_questions\/default\/0.0.2\/867dbbaf9137c1b8\r\n3ecb19f5eb80559e1002ea26e702c6b919cfa81a17a8c531... \r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 13.6k\/13.6k [00:00<00:00, 1.51MB\/s] \r\nDownloading: 7%|\u2588\u2588\u2588\u258e | 6.70G\/97.4G [03:46<1:37:05, 15.6MB\/s]\r\n```\r\n\r\n## Expected behaviour:\r\nThe dataset \"Natural Questions\" should be downloaded to the directory \".\/data\"\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/900\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/900\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/899","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/899\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/899\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/899\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/899","id":752191227,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI4NTYzNzYz","number":899,"title":"Allow arrow based builder in auto dummy data 
generation","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-27T11:39:38Z","updated_at":"2020-11-27T13:30:09Z","closed_at":"2020-11-27T13:30:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/899","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/899","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/899.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/899.patch","merged_at":"2020-11-27T13:30:08Z"},"body":"Following #898 I added support for arrow based builder for the auto dummy data generator","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/899\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/899\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/898","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/898\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/898\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/898\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/898","id":752148284,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI4NTI4MDY1","number":898,"title":"Adding SQA 
dataset","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-27T10:29:18Z","updated_at":"2020-12-15T12:54:40Z","closed_at":"2020-12-15T12:54:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/898","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/898","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/898.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/898.patch","merged_at":null},"body":"As discussed in #880 \r\n\r\nSeems like automatic dummy-data generation doesn't work if the builder is a `ArrowBasedBuilder`, do you think you could take a look @lhoestq ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/898\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/898\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/897","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/897\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/897\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/897\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/897","id":752100256,"node_id":"MDU6SXNzdWU3NTIxMDAyNTY=","number":897,"title":"Dataset viewer 
issues","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-11-27T09:14:34Z","updated_at":"2021-10-31T09:12:01Z","closed_at":"2021-10-31T09:12:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I was looking through the dataset viewer and I like it a lot. Version numbers, citation information, everything's there! I've spotted a few issues\/bugs though:\r\n\r\n- the URL is still under `nlp`, perhaps an alias for `datasets` can be made\r\n- when I remove a **feature** (and the feature list is empty), I get an error. This is probably expected, but perhaps a better error message can be shown to the user\r\n\r\n```bash\r\nIndexError: list index out of range\r\nTraceback:\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/ScriptRunner.py\", line 322, in _run_script\r\n exec(code, module.__dict__)\r\nFile \"\/home\/sasha\/nlp-viewer\/run.py\", line 316, in \r\n st.table(style)\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/DeltaGenerator.py\", line 122, in wrapped_method\r\n return dg._enqueue_new_element_delta(marshall_element, delta_type, last_index)\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/DeltaGenerator.py\", line 367, in _enqueue_new_element_delta\r\n rv = marshall_element(msg.delta.new_element)\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/DeltaGenerator.py\", line 120, in marshall_element\r\n return method(dg, element, *args, **kwargs)\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/DeltaGenerator.py\", line 2944, in table\r\n data_frame_proto.marshall_data_frame(data, element.table)\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/elements\/data_frame_proto.py\", line 54, in marshall_data_frame\r\n _marshall_styles(proto_df.style, df, styler)\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/elements\/data_frame_proto.py\", line 73, in _marshall_styles\r\n translated_style = styler._translate()\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/pandas\/io\/formats\/style.py\", line 351, in _translate\r\n * (len(clabels[0]) - len(hidden_columns))\r\n```\r\n\r\n- there seems to be **an encoding issue** in the default view, the dataset examples are shown as raw monospace text, without a decent encoding. 
That makes it hard to read for languages that use a lot of special characters. Take for instance the [cs-en WMT19 set](https:\/\/huggingface.co\/nlp\/viewer\/?dataset=wmt19&config=cs-en). This problem goes away when you enable \"List view\", because then some syntax highlighteris used, and the special characters are coded correctly.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/897\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/897\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/896","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/896\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/896\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/896\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/896","id":751834265,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI4MjcyMjc0","number":896,"title":"Add template and documentation for dataset card","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-26T21:30:25Z","updated_at":"2020-11-28T01:10:15Z","closed_at":"2020-11-28T01:10:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/896","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/896","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/896.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/896.patch","merged_at":"2020-11-28T01:10:14Z"},"body":"This PR adds a template for dataset cards, as well as a guide to filling out the template and a completed example for the ELI5 dataset, building on the work of @mcmillanmajora \r\n\r\nNew pull requests adding datasets should now have a README.md file which serves both to hold the tags we will have to index the datasets and as a data statement.\r\n\r\nThe template is designed to be pretty extensive. 
The idea is that the person who uploads the dataset should put in all the basic information (at least the Dataset Description section) and whatever else they feel comfortable adding and leave the `[More Information Needed]` annotation everywhere else as a placeholder.\r\n\r\nWe will then work with @mcmillanmajora to involve the data authors more directly in filling out the remaining information.\r\n\r\nDirect links to:\r\n- [Documentation](https:\/\/github.com\/yjernite\/datasets\/blob\/add_dataset_card_doc\/templates\/README_guide.md)\r\n- [Empty template](https:\/\/github.com\/yjernite\/datasets\/blob\/add_dataset_card_doc\/templates\/README.md)\r\n- [ELI5 example](https:\/\/github.com\/yjernite\/datasets\/blob\/add_dataset_card_doc\/datasets\/eli5\/README.md)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/896\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/896\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/895","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/895\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/895\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/895\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/895","id":751782295,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI4MjMyMjU3","number":895,"title":"Better messages regarding split naming","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-26T18:55:46Z","updated_at":"2020-11-27T13:31:00Z","closed_at":"2020-11-27T13:30:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/895","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/895","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/895.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/895.patch","merged_at":"2020-11-27T13:30:59Z"},"body":"I made explicit the error message when a bad split name is used.\r\n\r\nAlso I wanted to allow the `-` symbol for split names but actually this symbol is used to name the arrow files `{dataset_name}-{dataset_split}.arrow` so we should 
probably keep it this way, i.e. not allowing the `-` symbol in split names. Moreover in the future we might want to use `{dataset_name}-{dataset_split}-{shard_id}_of_{n_shards}.arrow` and reuse the `-` symbol.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/895\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/895\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/894","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/894\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/894\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/894\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/894","id":751734905,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI4MTkzNzQy","number":894,"title":"Allow several tags sets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-26T17:04:13Z","updated_at":"2021-05-05T18:24:17Z","closed_at":"2020-11-27T20:15:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/894","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/894","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/894.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/894.patch","merged_at":null},"body":"Hi !\r\n\r\nCurrently we have three dataset cards : snli, cnn_dailymail and allocine.\r\nFor each one of those datasets a set of tag is defined. The set of tags contains fields like `multilinguality`, `task_ids`, `licenses` etc.\r\n\r\nFor certain datasets like `glue` for example, there exist several configurations: `sst2`, `mnli` etc. Therefore we should define one set of tags per configuration. 
However the current format used for tags only supports one set of tags per dataset.\r\n\r\nIn this PR I propose a simple change in the yaml format used for tags to allow for several sets of tags.\r\n\r\nLet me know what you think, especially @julien-c let me know if it's good for you since it's going to be parsed by moon-landing","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/894\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/894\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/893","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/893\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/893\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/893\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/893","id":751703696,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI4MTY4NDgx","number":893,"title":"add metrec: arabic poetry dataset","user":{"login":"zaidalyafeai","id":15667714,"node_id":"MDQ6VXNlcjE1NjY3NzE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15667714?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zaidalyafeai","html_url":"https:\/\/github.com\/zaidalyafeai","followers_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/followers","following_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/orgs","repos_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/repos","events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zaidalyafeai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-11-26T16:10:16Z","updated_at":"2020-12-01T16:24:55Z","closed_at":"2020-12-01T15:15:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/893","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/893","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/893.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/893.patch","merged_at":"2020-12-01T15:15:07Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/893\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/893\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/892","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/892\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/892\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/892\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/892","id":751658262,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI4MTMxNTE1","number":892,"title":"Add a few datasets of reference in the documentation","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-26T15:02:39Z","updated_at":"2020-11-27T18:08:45Z","closed_at":"2020-11-27T18:08:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/892","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/892","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/892.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/892.patch","merged_at":"2020-11-27T18:08:44Z"},"body":"I started making a small list of various datasets of reference in the documentation.\r\nSince many datasets share a lot in common I think it's good to have a list of datasets scripts to get some inspiration from.\r\n\r\nLet me know what you think, and if you have ideas of other datasets that we may add to this list, please let me know.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/892\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/892\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/891","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/891\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/891\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/891\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/891","id":751576869,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI4MDY1MTQ3","number":891,"title":"gitignore 
.python-version","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-26T13:05:58Z","updated_at":"2020-11-26T13:28:27Z","closed_at":"2020-11-26T13:28:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/891","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/891","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/891.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/891.patch","merged_at":"2020-11-26T13:28:26Z"},"body":"ignore `.python-version` added by `pyenv`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/891\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/891\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/890","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/890\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/890\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/890\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/890","id":751534050,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI4MDI5NjA3","number":890,"title":"Add 
LER","user":{"login":"JoelNiklaus","id":3775944,"node_id":"MDQ6VXNlcjM3NzU5NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3775944?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JoelNiklaus","html_url":"https:\/\/github.com\/JoelNiklaus","followers_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/followers","following_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/orgs","repos_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/repos","events_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2020-11-26T11:58:23Z","updated_at":"2020-12-01T13:33:35Z","closed_at":"2020-12-01T13:26:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/890","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/890","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/890.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/890.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/890\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/890\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/889","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/889\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/889\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/889\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/889","id":751115691,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI3NjkwODE2","number":889,"title":"Optional per-dataset default config 
name","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-25T21:02:30Z","updated_at":"2020-11-30T17:27:33Z","closed_at":"2020-11-30T17:27:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/889","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/889","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/889.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/889.patch","merged_at":"2020-11-30T17:27:27Z"},"body":"This PR adds a `DEFAULT_CONFIG_NAME` class attribute to `DatasetBuilder`. This allows a dataset to have a specified default config name when a dataset has more than one config but the user does not specify it. For example, after defining `DEFAULT_CONFIG_NAME = \"combined\"` in PolyglotNER, a user can now do the following:\r\n\r\n```python\r\nds = load_dataset(\"polyglot_ner\")\r\n```\r\nwhich is equivalent to,\r\n```python\r\nds = load_dataset(\"polyglot_ner\", \"combined\")\r\n```\r\nIn effect (for this particular dataset configuration), this means that if the user doesn't specify a language, they are given the combined dataset including all languages.\r\n\r\nSince it doesn't always make sense to have a default config, this feature is opt-in. 
If `DEFAULT_CONFIG_NAME` is not defined and a user does not pass a config for a dataset with multiple configs available, a ValueError is raised like usual.\r\n\r\nLet me know what you think about this approach @lhoestq @thomwolf and I'll add some documentation and define a default for some of our existing datasets.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/889\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/889\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/888","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/888\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/888\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/888\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/888","id":750944422,"node_id":"MDU6SXNzdWU3NTA5NDQ0MjI=","number":888,"title":"Nested lists are zipped unexpectedly","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-25T16:07:46Z","updated_at":"2020-11-25T17:30:39Z","closed_at":"2020-11-25T17:30:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I might misunderstand something, but I expect that if I define:\r\n```python\r\n\"top\": datasets.features.Sequence({\r\n \"middle\": datasets.features.Sequence({\r\n \"bottom\": datasets.Value(\"int32\")\r\n })\r\n})\r\n```\r\n\r\nAnd I then create an example:\r\n```python\r\nyield 1, {\r\n \"top\": [{\r\n \"middle\": [\r\n {\"bottom\": 1},\r\n {\"bottom\": 2}\r\n ]\r\n }]\r\n}\r\n```\r\n\r\nI then load my dataset:\r\n```python\r\ntrain = load_dataset(\"my dataset\")[\"train\"]\r\n```\r\n\r\nand expect to be able to access `data[0][\"top\"][0][\"middle\"][0]`.\r\n\r\nThat is not the case. 
Here is `data[0]` as JSON:\r\n\r\n```json\r\n{\"top\": {\"middle\": [{\"bottom\": [1, 2]}]}}\r\n```\r\n\r\nClearly different than the thing I inputted.\r\n```json\r\n{\"top\": [{\"middle\": [{\"bottom\": 1},{\"bottom\": 2}]}]}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/888\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/888\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/887","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/887\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/887\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/887\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/887","id":750868831,"node_id":"MDU6SXNzdWU3NTA4Njg4MzE=","number":887,"title":"pyarrow.lib.ArrowNotImplementedError: MakeBuilder: cannot construct builder for type extension","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":14,"created_at":"2020-11-25T14:32:21Z","updated_at":"2021-09-09T17:03:40Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I set up a new dataset, with a sequence of arrays (really, I want to have an array of (None, 137, 2), and the first dimension is dynamic) \r\n\r\n```python\r\n def _info(self):\r\n return datasets.DatasetInfo(\r\n description=_DESCRIPTION,\r\n # This defines the different columns of the dataset and their types\r\n features=datasets.Features(\r\n {\r\n \"pose\": datasets.features.Sequence(datasets.features.Array2D(shape=(137, 2), dtype=\"float32\"))\r\n }\r\n ),\r\n homepage=_HOMEPAGE,\r\n citation=_CITATION,\r\n )\r\n def _generate_examples(self):\r\n \"\"\" Yields examples. 
\"\"\"\r\n\r\n yield 1, {\r\n \"pose\": [np.zeros(shape=(137, 2), dtype=np.float32)]\r\n }\r\n```\r\n\r\nBut this doesn't work -\r\n> pyarrow.lib.ArrowNotImplementedError: MakeBuilder: cannot construct builder for type extension","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/887\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/887\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/886","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/886\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/886\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/886\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/886","id":750829314,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI3NDU1MDU5","number":886,"title":"Fix wikipedia custom config","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-25T13:44:12Z","updated_at":"2021-06-25T05:24:16Z","closed_at":"2020-11-25T15:42:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/886","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/886","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/886.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/886.patch","merged_at":"2020-11-25T15:42:13Z"},"body":"It should be possible to use the wikipedia dataset with any `language` and `date`.\r\nHowever it was not working as noticed in #784 . 
Indeed the custom wikipedia configurations were not enabled for some reason.\r\n\r\nI fixed that and was able to run \r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset(\".\/datasets\/wikipedia\", language=\"zh\", date=\"20201120\", beam_runner='DirectRunner')\r\n```\r\n\r\ncc @stvhuang @SamuelCahyawijaya\r\n\r\nFix #784","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/886\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/886\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/885","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/885\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/885\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/885\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/885","id":750789052,"node_id":"MDU6SXNzdWU3NTA3ODkwNTI=","number":885,"title":"Very slow cold-start","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-25T12:47:58Z","updated_at":"2021-01-13T11:31:25Z","closed_at":"2021-01-13T11:31:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI expect when importing `datasets` that nothing major happens in the background, and so the import should be insignificant.\r\nWhen I load a metric, or a dataset, its fine that it takes time.\r\n\r\nThe following ranges from 3 to 9 seconds:\r\n```\r\npython -m timeit -n 1 -r 1 'from datasets import load_dataset'\r\n```\r\n\r\nedit:\r\nsorry for the mis-tag, not sure how I added it.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/885\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/885\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/884","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/884\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/884\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/884\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/884","id":749862034,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI2NjA5MDc1","number":884,"title":"Auto generate dummy data","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-24T16:31:34Z","updated_at":"2020-11-26T14:18:47Z","closed_at":"2020-11-26T14:18:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/884","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/884","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/884.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/884.patch","merged_at":"2020-11-26T14:18:46Z"},"body":"When adding a new dataset to the library, dummy data creation can take some time.\r\nTo make things easier I added a command line tool that automatically generates dummy data when possible.\r\n\r\nThe tool only supports certain data files types: txt, csv, tsv, jsonl, json and xml.\r\n\r\nHere are some examples:\r\n```\r\npython datasets-cli dummy_data .\/datasets\/snli --auto_generate\r\npython datasets-cli dummy_data .\/datasets\/squad --auto_generate --json_field data\r\npython datasets-cli dummy_data .\/datasets\/iwslt2017 --auto_generate --xml_tag seg --match_text_files \"train*\" --n_lines 15\r\n# --xml_tag seg => each sample corresponds to a \"seg\" tag in the xml tree\r\n# --match_text_files \"train*\" => also match text files that don't have a proper text file extension (no suffix like \".txt\" for example)\r\n# --n_lines 15 => some text files have headers so we have to use at least 15 lines\r\n```\r\n\r\nand here is the command usage:\r\n\r\n```\r\nusage: datasets-cli [] dummy_data [-h] [--auto_generate]\r\n [--n_lines N_LINES]\r\n [--json_field JSON_FIELD]\r\n [--xml_tag XML_TAG]\r\n [--match_text_files MATCH_TEXT_FILES]\r\n [--keep_uncompressed]\r\n [--cache_dir CACHE_DIR]\r\n path_to_dataset\r\n\r\npositional arguments:\r\npath_to_dataset Path to the dataset (example: .\/datasets\/squad)\r\n\r\noptional 
arguments:\r\n-h, --help show this help message and exit\r\n--auto_generate Try to automatically generate dummy data\r\n--n_lines N_LINES Number of lines or samples to keep when auto-\r\n generating dummy data\r\n--json_field JSON_FIELD\r\n Optional, json field to read the data from when auto-\r\n generating dummy data. In the json data files, this\r\n field must point to a list of samples as json objects\r\n (ex: the 'data' field for squad-like files)\r\n--xml_tag XML_TAG Optional, xml tag name of the samples inside the xml\r\n files when auto-generating dummy data.\r\n--match_text_files MATCH_TEXT_FILES\r\n Optional, a comma separated list of file patterns that\r\n looks for line-by-line text files other than *.txt or\r\n *.csv. Example: --match_text_files *.label\r\n--keep_uncompressed Don't compress the dummy data folders when auto-\r\n generating dummy data. Useful for debugging for to do\r\n manual adjustements before compressing.\r\n--cache_dir CACHE_DIR\r\n Cache directory to download and cache files when auto-\r\n generating dummy data\r\n```\r\n\r\nThe command generates all the necessary `dummy_data.zip` files (one per config).\r\n\r\nHow it works:\r\n- it runs the split_generators() method of the dataset script to download the original data files\r\n- when downloading it records a mapping between the downloaded files and the corresponding expected dummy data files paths\r\n- then for each data file it creates the dummy data file keeping only the first samples (the strategy depends on the type of file)\r\n- finally it compresses the dummy data folders into dummy_zip files ready for dataset tests\r\n\r\nLet me know if that makes sense or if you have ideas to improve this tool !\r\n\r\nI also added a unit test.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/884\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/884\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/883","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/883\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/883\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/883\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/883","id":749750801,"node_id":"MDU6SXNzdWU3NDk3NTA4MDE=","number":883,"title":"Downloading\/caching only a part of a datasets' 
dataset.","user":{"login":"SapirWeissbuch","id":44585792,"node_id":"MDQ6VXNlcjQ0NTg1Nzky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44585792?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SapirWeissbuch","html_url":"https:\/\/github.com\/SapirWeissbuch","followers_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/followers","following_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/orgs","repos_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/repos","events_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SapirWeissbuch\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-24T14:25:18Z","updated_at":"2020-11-27T13:51:55Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI want to use the validation data *only* (of natural question).\r\nI don't want to have the whole dataset cached in my machine, just the dev set.\r\nIs this possible? 
I can't find a way to do it in the docs.\r\n\r\nThank you,\r\nSapir","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/883\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/883\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/882","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/882\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/882\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/882\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/882","id":749662188,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI2NDQyMjA2","number":882,"title":"Update README.md","user":{"login":"vaibhavad","id":32997732,"node_id":"MDQ6VXNlcjMyOTk3NzMy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32997732?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vaibhavad","html_url":"https:\/\/github.com\/vaibhavad","followers_url":"https:\/\/api.github.com\/users\/vaibhavad\/followers","following_url":"https:\/\/api.github.com\/users\/vaibhavad\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vaibhavad\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vaibhavad\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vaibhavad\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vaibhavad\/orgs","repos_url":"https:\/\/api.github.com\/users\/vaibhavad\/repos","events_url":"https:\/\/api.github.com\/users\/vaibhavad\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vaibhavad\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-24T12:23:52Z","updated_at":"2021-01-29T10:41:07Z","closed_at":"2021-01-29T10:41:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/882","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/882","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/882.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/882.patch","merged_at":"2021-01-29T10:41:06Z"},"body":"\"no label\" is \"-\" in the original dataset but \"-1\" in Huggingface distribution.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/882\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/882\/timeline","performed_via_github_app":null} 
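Editor's note on the "-1 means no label" README update above: a short sketch, assuming an SNLI-style dataset where unlabeled rows are encoded as `-1` as described in that note. `Dataset.filter` is used here only to illustrate dropping those rows; the dataset name and split are illustrative.

```python
# Sketch: drop rows whose label is -1 (the "no label" marker mentioned in the README note).
from datasets import load_dataset

snli = load_dataset("snli", split="validation")
labeled = snli.filter(lambda example: example["label"] != -1)
print(len(snli), "->", len(labeled), "examples after dropping unlabeled rows")
```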
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/881","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/881\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/881\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/881\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/881","id":749548107,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI2MzQ5MDM2","number":881,"title":"Use GCP download url instead of tensorflow custom download for boolq","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-24T09:47:11Z","updated_at":"2020-11-24T10:12:34Z","closed_at":"2020-11-24T10:12:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/881","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/881","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/881.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/881.patch","merged_at":"2020-11-24T10:12:33Z"},"body":"BoolQ is a dataset that used tf.io.gfile.copy to download the file from a GCP bucket.\r\nIt prevented the dataset to be downloaded twice because of a FileAlreadyExistsError.\r\nEven though the error could be fixed by providing `overwrite=True` to the tf.io.gfile.copy call, I changed the script to use GCP download urls and use regular downloads instead and remove the tensorflow dependency.\r\n\r\nFix #875 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/881\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/881\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/880","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/880\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/880\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/880\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/880","id":748949606,"node_id":"MDU6SXNzdWU3NDg5NDk2MDY=","number":880,"title":"Add SQA","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-23T16:31:55Z","updated_at":"2020-12-23T13:58:24Z","closed_at":"2020-12-23T13:58:23Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** SQA (Sequential Question Answering) by Microsoft. \r\n- **Description:** The SQA dataset was created to explore the task of answering sequences of inter-related questions on HTML tables. It has 6,066 sequences with 17,553 questions in total.\r\n- **Paper:** https:\/\/www.microsoft.com\/en-us\/research\/publication\/search-based-neural-structured-learning-sequential-question-answering\/\r\n- **Data:** https:\/\/www.microsoft.com\/en-us\/download\/details.aspx?id=54253\r\n- **Motivation:** currently, the [Tapas](https:\/\/ai.googleblog.com\/2020\/04\/using-neural-networks-to-find-answers.html) algorithm by Google AI is being added to the Transformers library (see https:\/\/github.com\/huggingface\/transformers\/pull\/8113). It would be great to use that model in combination with this dataset, on which it achieves SOTA results (average question accuracy of 0.71).\r\n\r\nNote 1: this dataset actually consists of 2 types of files: \r\n1) TSV files, containing the questions, answer coordinates and answer texts (for training, dev and test)\r\n2) a folder of csv files, which contain the actual tabular data\r\n\r\nNote 2: if you download the dataset straight from the download link above, then you will see that the `answer_coordinates` and `answer_text` columns are string lists of string tuples and strings respectively, which is not ideal. 
It would be better to make them true Python lists of tuples and strings respectively (using `ast.literal_eval`), before uploading them to the HuggingFace hub.\r\n\r\nAdding this would be great! Then we could possibly also add [WTQ (WikiTable Questions)](https:\/\/github.com\/ppasupat\/WikiTableQuestions) and [TabFact (Tabular Fact Checking)](https:\/\/github.com\/wenhuchen\/Table-Fact-Checking) on which TAPAS also achieves state-of-the-art results. Note that the TAPAS algorithm requires these datasets to first be converted into the SQA format.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/880\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/880\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/879","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/879\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/879\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/879\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/879","id":748848847,"node_id":"MDU6SXNzdWU3NDg4NDg4NDc=","number":879,"title":"boolq does not load ","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-23T14:28:28Z","updated_at":"2020-11-27T13:51:11Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am getting these errors trying to load boolq thanks \r\n\r\nTraceback (most recent call last):\r\n File \"test.py\", line 5, in \r\n data = AutoTask().get(\"boolq\").get_dataset(\"train\", n_obs=10)\r\n File \"\/remote\/idiap.svm\/user.active\/rkarimi\/dev\/internship\/seq2seq\/tasks\/tasks.py\", line 42, in get_dataset\r\n dataset = self.load_dataset(split=split)\r\n File 
\"\/remote\/idiap.svm\/user.active\/rkarimi\/dev\/internship\/seq2seq\/tasks\/tasks.py\", line 38, in load_dataset\r\n return datasets.load_dataset(self.task.name, split=split)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 531, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \" \/idiap\/home\/rkarimi\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/boolq\/2987db1f15deaa19500ae24de560eabeaf1f8ef51df88c0470beeec72943bf11\/boolq.py\", line 74, in _split_generators\r\n downloaded_files = dl_manager.download_custom(urls_to_download, tf.io.gfile.copy)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 150, in download_custom\r\n get_from_cache(url, cache_dir=cache_dir, local_files_only=True, use_etag=False)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 472, in get_from_cache\r\n f\"Cannot find the requested files in the cached path at {cache_path} and outgoing traffic has been\"\r\nFileNotFoundError: Cannot find the requested files in the cached path at \/idiap\/home\/rkarimi\/.cache\/huggingface\/datasets\/eaee069e38f6ceaa84de02ad088c34e63ec97671f2cd1910ddb16b10dc60808c and outgoing traffic has been disabled. 
To enable file online look-ups, set 'local_files_only' to False.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/879\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/879\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/878","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/878\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/878\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/878\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/878","id":748621981,"node_id":"MDU6SXNzdWU3NDg2MjE5ODE=","number":878,"title":"Loading Data From S3 Path in Sagemaker","user":{"login":"mahesh1amour","id":42795522,"node_id":"MDQ6VXNlcjQyNzk1NTIy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42795522?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mahesh1amour","html_url":"https:\/\/github.com\/mahesh1amour","followers_url":"https:\/\/api.github.com\/users\/mahesh1amour\/followers","following_url":"https:\/\/api.github.com\/users\/mahesh1amour\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mahesh1amour\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mahesh1amour\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mahesh1amour\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mahesh1amour\/orgs","repos_url":"https:\/\/api.github.com\/users\/mahesh1amour\/repos","events_url":"https:\/\/api.github.com\/users\/mahesh1amour\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mahesh1amour\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":16,"created_at":"2020-11-23T09:17:22Z","updated_at":"2020-12-23T09:53:08Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In Sagemaker Im tring to load the data set from S3 path as follows\r\n\r\n`train_path = 's3:\/\/xxxxxxxxxx\/xxxxxxxxxx\/train.csv'\r\n valid_path = 's3:\/\/xxxxxxxxxx\/xxxxxxxxxx\/validation.csv'\r\n test_path = 's3:\/\/xxxxxxxxxx\/xxxxxxxxxx\/test.csv'\r\n \r\n data_files = {}\r\n data_files[\"train\"] = train_path\r\n data_files[\"validation\"] = valid_path\r\n data_files[\"test\"] = test_path\r\n extension = train_path.split(\".\")[-1]\r\n datasets = load_dataset(extension, data_files=data_files, s3_enabled=True)\r\n print(datasets)`\r\n\r\n\r\nI getting an error of\r\n\r\n`algo-1-7plil_1 | File \"main.py\", line 21, in \r\nalgo-1-7plil_1 | datasets = load_dataset(extension, data_files=data_files)\r\nalgo-1-7plil_1 | File 
\"\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 603, in load_dataset\r\nalgo-1-7plil_1 | **config_kwargs,\r\nalgo-1-7plil_1 | File \"\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 155, in __init__\r\nalgo-1-7plil_1 | **config_kwargs,\r\nalgo-1-7plil_1 | File \"\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 305, in _create_builder_config\r\nalgo-1-7plil_1 | m.update(str(os.path.getmtime(data_file)))\r\nalgo-1-7plil_1 | File \"\/opt\/conda\/lib\/python3.6\/genericpath.py\", line 55, in getmtime\r\nalgo-1-7plil_1 | return os.stat(filename).st_mtime\r\nalgo-1-7plil_1 | FileNotFoundError: [Errno 2] No such file or directory: 's3:\/\/lsmv-sagemaker\/pubmedbert\/test.csv`\r\n\r\nBut when im trying with pandas , it is able to load from S3\r\n\r\nDoes the datasets library support S3 path to load","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/878\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/878\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/877","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/877\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/877\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/877\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/877","id":748234438,"node_id":"MDU6SXNzdWU3NDgyMzQ0Mzg=","number":877,"title":"DataLoader(datasets) become more and more slowly within iterations","user":{"login":"shexuan","id":25664170,"node_id":"MDQ6VXNlcjI1NjY0MTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25664170?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shexuan","html_url":"https:\/\/github.com\/shexuan","followers_url":"https:\/\/api.github.com\/users\/shexuan\/followers","following_url":"https:\/\/api.github.com\/users\/shexuan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shexuan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shexuan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shexuan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shexuan\/orgs","repos_url":"https:\/\/api.github.com\/users\/shexuan\/repos","events_url":"https:\/\/api.github.com\/users\/shexuan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shexuan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-22T12:41:10Z","updated_at":"2020-11-29T15:45:12Z","closed_at":"2020-11-29T15:45:12Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello, when I for loop my dataloader, the loading speed is becoming more and more slowly!\r\n```\r\ndataset = load_from_disk(dataset_path) # around 21,000,000 lines\r\n\r\nlineloader = tqdm(DataLoader(dataset, batch_size=1))\r\nfor idx, line in enumerate(lineloader):\r\n # do some thing for each line\r\n```\r\nIn the begining, the loading speed is around 2000it\/s, but after 1 minutes later, the speed is much slower, just around 
800it\/s.\r\n\r\nAnd when I set `num_workers=4` in DataLoader, the loading speed is much lower, just 130it\/s.\r\n\r\nCould you please help me with this problem?\r\nThanks a lot!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/877\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/877\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/876","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/876\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/876\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/876\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/876","id":748195104,"node_id":"MDU6SXNzdWU3NDgxOTUxMDQ=","number":876,"title":"imdb dataset cannot be loaded ","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-11-22T08:24:43Z","updated_at":"2021-11-26T11:07:16Z","closed_at":"2020-12-24T17:38:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am trying to load the imdb train dataset\r\n\r\n`dataset = datasets.load_dataset(\"imdb\", split=\"train\")`\r\n\r\ngetting following errors, thanks for your help \r\n```\r\nTraceback (most recent call last): \r\n File \"\", line 1, in \r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 558, in _download_and_prepare\r\n verify_splits(self.info.splits, split_dict)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/info_utils.py\", line 73, in verify_splits\r\n raise 
NonMatchingSplitsSizesError(str(bad_splits))\r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='test', num_bytes=32660064, num_examples=25000, dataset_name='imdb'), 'recorded': SplitInfo(name='test', num_bytes=26476338, num_examples=20316, dataset_name='imdb')}, {'expected': SplitInfo(name='train', num_bytes=33442202, num_examples=25000, dataset_name='imdb'), 'recorded': SplitInfo(name='train', num_bytes=0, num_examples=0, dataset_name='imdb')}, {'expected': SplitInfo(name='unsupervised', num_bytes=67125548, num_examples=50000, dataset_name='imdb'), 'recorded': SplitInfo(name='unsupervised', num_bytes=0, num_examples=0, dataset_name='imdb')}]\r\n>>> dataset = datasets.load_dataset(\"imdb\", split=\"train\")\r\n\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/876\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/876\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/875","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/875\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/875\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/875\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/875","id":748194311,"node_id":"MDU6SXNzdWU3NDgxOTQzMTE=","number":875,"title":"bug in boolq dataset loading","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-22T08:18:34Z","updated_at":"2020-11-24T10:12:33Z","closed_at":"2020-11-24T10:12:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am trying to load boolq dataset:\r\n\r\n```\r\nimport datasets\r\ndatasets.load_dataset(\"boolq\")\r\n```\r\n\r\nI am getting the following errors, thanks for your help \r\n\r\n```\r\n>>> import datasets\r\n2020-11-22 09:16:30.070332: W tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory\r\n2020-11-22 09:16:30.070389: I tensorflow\/stream_executor\/cuda\/cudart_stub.cc:29] Ignore above cudart dlerror if you do 
not have a GPU set up on your machine.\r\n>>> datasets.load_dataset(\"boolq\")\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home\/datasets\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home\/datasets\r\nUsing custom data configuration default\r\nDownloading and preparing dataset boolq\/default (download: 8.36 MiB, generated: 7.47 MiB, post-processed: Unknown size, total: 15.83 MiB) to \/idiap\/temp\/rkarimi\/cache_home\/datasets\/boolq\/default\/0.1.0\/2987db1f15deaa19500ae24de560eabeaf1f8ef51df88c0470beeec72943bf11...\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home\/datasets\r\ncahce dir \/idiap\/temp\/rkarimi\/cache_home\/datasets\/downloads\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 531, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \" \/idiap\/home\/rkarimi\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/boolq\/2987db1f15deaa19500ae24de560eabeaf1f8ef51df88c0470beeec72943bf11\/boolq.py\", line 74, in _split_generators\r\n downloaded_files = dl_manager.download_custom(urls_to_download, tf.io.gfile.copy)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 149, in download_custom\r\n custom_download(url, path)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/tensorflow\/python\/lib\/io\/file_io.py\", line 516, in copy_v2\r\n compat.path_to_bytes(src), compat.path_to_bytes(dst), overwrite)\r\ntensorflow.python.framework.errors_impl.AlreadyExistsError: file already exists\r\n\r\n\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/875\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/875\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/874","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/874\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/874\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/874\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/874","id":748193140,"node_id":"MDU6SXNzdWU3NDgxOTMxNDA=","number":874,"title":"trec dataset unavailable 
","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-22T08:09:36Z","updated_at":"2020-11-27T13:56:42Z","closed_at":"2020-11-27T13:56:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nwhen I try to load the trec dataset I am getting these errors, thanks for your help\r\n\r\n`datasets.load_dataset(\"trec\", split=\"train\")\r\n`\r\n```\r\n File \"\", line 1, in \r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 531, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \" \/idiap\/home\/rkarimi\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/trec\/ca4248481ad244f235f4cf277186cad2ee8769f975119a2bbfc41b8932b88bd7\/trec.py\", line 140, in _split_generators\r\n dl_files = dl_manager.download_and_extract(_URLs)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 254, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 179, in download\r\n num_proc=download_config.num_proc,\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in \r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 163, in _single_map_nested\r\n return 
function(data_struct)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 477, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/cogcomp.org\/Data\/QA\/QC\/train_5500.label\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/874\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/874\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/873","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/873\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/873\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/873\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/873","id":747959523,"node_id":"MDU6SXNzdWU3NDc5NTk1MjM=","number":873,"title":"load_dataset('cnn_dalymail', '3.0.0') gives a 'Not a directory' error","user":{"login":"vishal-burman","id":19861874,"node_id":"MDQ6VXNlcjE5ODYxODc0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19861874?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vishal-burman","html_url":"https:\/\/github.com\/vishal-burman","followers_url":"https:\/\/api.github.com\/users\/vishal-burman\/followers","following_url":"https:\/\/api.github.com\/users\/vishal-burman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vishal-burman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vishal-burman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vishal-burman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vishal-burman\/orgs","repos_url":"https:\/\/api.github.com\/users\/vishal-burman\/repos","events_url":"https:\/\/api.github.com\/users\/vishal-burman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vishal-burman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-11-21T06:30:45Z","updated_at":"2020-12-03T11:04:15Z","closed_at":"2020-11-22T12:18:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('cnn_dailymail', '3.0.0')\r\n```\r\nStack trace:\r\n```\r\n---------------------------------------------------------------------------\r\n\r\nNotADirectoryError Traceback (most recent call last)\r\n\r\n in ()\r\n 1 from datasets import load_dataset\r\n----> 2 dataset = load_dataset('cnn_dailymail', '3.0.0')\r\n\r\n5 frames\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, save_infos, script_version, **config_kwargs)\r\n 608 download_config=download_config,\r\n 609 
download_mode=download_mode,\r\n--> 610 ignore_verifications=ignore_verifications,\r\n 611 )\r\n 612 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n 513 if not downloaded_from_gcs:\r\n 514 self._download_and_prepare(\r\n--> 515 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 516 )\r\n 517 # Sync info\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 568 split_dict = SplitDict(dataset_name=self.name)\r\n 569 split_generators_kwargs = self._make_split_generators_kwargs(prepare_split_kwargs)\r\n--> 570 split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n 571 \r\n 572 # Checksums verification\r\n\r\n\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/cnn_dailymail\/0128610a44e10f25b4af6689441c72af86205282d26399642f7db38fa7535602\/cnn_dailymail.py in _split_generators(self, dl_manager)\r\n 252 def _split_generators(self, dl_manager):\r\n 253 dl_paths = dl_manager.download_and_extract(_DL_URLS)\r\n--> 254 train_files = _subset_filenames(dl_paths, datasets.Split.TRAIN)\r\n 255 # Generate shared vocabulary\r\n 256 \r\n\r\n\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/cnn_dailymail\/0128610a44e10f25b4af6689441c72af86205282d26399642f7db38fa7535602\/cnn_dailymail.py in _subset_filenames(dl_paths, split)\r\n 153 else:\r\n 154 logging.fatal(\"Unsupported split: %s\", split)\r\n--> 155 cnn = _find_files(dl_paths, \"cnn\", urls)\r\n 156 dm = _find_files(dl_paths, \"dm\", urls)\r\n 157 return cnn + dm\r\n\r\n\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/cnn_dailymail\/0128610a44e10f25b4af6689441c72af86205282d26399642f7db38fa7535602\/cnn_dailymail.py in _find_files(dl_paths, publisher, url_dict)\r\n 132 else:\r\n 133 logging.fatal(\"Unsupported publisher: %s\", publisher)\r\n--> 134 files = sorted(os.listdir(top_dir))\r\n 135 \r\n 136 ret_files = []\r\n\r\nNotADirectoryError: [Errno 20] Not a directory: '\/root\/.cache\/huggingface\/datasets\/downloads\/1bc05d24fa6dda2468e83a73cf6dc207226e01e3c48a507ea716dc0421da583b\/cnn\/stories'\r\n```\r\nI have ran the code on Google Colab","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/873\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/873\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/872","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/872\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/872\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/872\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/872","id":747653697,"node_id":"MDExOlB1bGxSZXF1ZXN0NTI0ODM4NjEx","number":872,"title":"Add IndicGLUE dataset and 
Metrics","user":{"login":"sumanthd17","id":28291870,"node_id":"MDQ6VXNlcjI4MjkxODcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28291870?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sumanthd17","html_url":"https:\/\/github.com\/sumanthd17","followers_url":"https:\/\/api.github.com\/users\/sumanthd17\/followers","following_url":"https:\/\/api.github.com\/users\/sumanthd17\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sumanthd17\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sumanthd17\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sumanthd17\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sumanthd17\/orgs","repos_url":"https:\/\/api.github.com\/users\/sumanthd17\/repos","events_url":"https:\/\/api.github.com\/users\/sumanthd17\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sumanthd17\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-20T17:09:34Z","updated_at":"2020-11-25T17:01:11Z","closed_at":"2020-11-25T15:26:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/872","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/872","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/872.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/872.patch","merged_at":"2020-11-25T15:26:07Z"},"body":"Added IndicGLUE benchmark for evaluating models on 11 Indian Languages. The descriptions of the tasks and the corresponding paper can be found [here](https:\/\/indicnlp.ai4bharat.org\/indic-glue\/)\r\n\r\n- [x] Followed the instructions in CONTRIBUTING.md\r\n- [x] Ran the tests successfully \r\n- [x] Created the dummy data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/872\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/872\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/871","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/871\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/871\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/871\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/871","id":747470136,"node_id":"MDU6SXNzdWU3NDc0NzAxMzY=","number":871,"title":"terminate called after throwing an instance of 
'google::protobuf::FatalException'","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-20T12:56:24Z","updated_at":"2020-12-12T21:16:32Z","closed_at":"2020-12-12T21:16:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am using the dataset \"iwslt2017-en-nl\", and after downloading it I am getting this error when trying to evaluate it on T5-base with seq2seq_trainer.py in the huggingface repo could you assist me please? thanks \r\n\r\n\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 63\/63 [02:47<00:00, 2.18s\/it][libprotobuf FATAL \/sentencepiece\/src\/..\/third_party\/protobuf-lite\/google\/protobuf\/repeated_field.h:1505] CHECK failed: (index) >= (0): \r\nterminate called after throwing an instance of 'google::protobuf::FatalException'\r\n what(): CHECK failed: (index) >= (0): \r\nrun_t5_base_eval.sh: line 19: 5795 Aborted ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/871\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/871\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/870","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/870\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/870\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/870\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/870","id":747021996,"node_id":"MDU6SXNzdWU3NDcwMjE5OTY=","number":870,"title":"[Feature Request] Add optional parameter in text loading script to preserve 
linebreaks","user":{"login":"jncasey","id":31020859,"node_id":"MDQ6VXNlcjMxMDIwODU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31020859?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jncasey","html_url":"https:\/\/github.com\/jncasey","followers_url":"https:\/\/api.github.com\/users\/jncasey\/followers","following_url":"https:\/\/api.github.com\/users\/jncasey\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jncasey\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jncasey\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jncasey\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jncasey\/orgs","repos_url":"https:\/\/api.github.com\/users\/jncasey\/repos","events_url":"https:\/\/api.github.com\/users\/jncasey\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jncasey\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-19T23:51:31Z","updated_at":"2020-11-27T13:48:11Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm working on a project about rhyming verse using phonetic poetry and song lyrics, and line breaks are a vital part of the data. \r\n\r\nI recently switched over to use the datasets library when my various corpora grew larger than my computer's memory. And so far, it is SO great. \r\n\r\nBut the first time I processed all of my data into a dataset, I hadn't realized the text loader script was processing the source files line-by-line and stripping off the newlines. \r\n\r\nOnce I caught the issue, I made my own data loader by modifying one line in the default text loader (changing `batch = batch.splitlines()` to `batch = batch.splitlines(True)` inside `_generate_tables`). And so I'm all set as far as my project is concerned.\r\n\r\nBut if my use case is more general, it seems like it'd be pretty trivial to add a kwarg to the default text loader called keeplinebreaks or something, which would default to False and get passed to `splitlines()`. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/870\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/870\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/869","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/869\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/869\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/869\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/869","id":746495711,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIzODc3OTkw","number":869,"title":"Update ner datasets infos","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-19T11:28:03Z","updated_at":"2020-11-19T14:14:18Z","closed_at":"2020-11-19T14:14:17Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/869","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/869","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/869.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/869.patch","merged_at":"2020-11-19T14:14:17Z"},"body":"Update the dataset_infos.json files for changes made in #850 regarding the ner datasets feature types (and the change to ClassLabel)\r\nI also fixed the ner types of conll2003","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/869\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/869\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/868","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/868\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/868\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/868\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/868","id":745889882,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIzMzc2MzQ3","number":868,"title":"Consistent metric outputs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-18T18:05:59Z","updated_at":"2020-11-26T17:32:27Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/868","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/868","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/868.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/868.patch","merged_at":null},"body":"To automate the use of metrics, they should return consistent outputs.\r\nIn particular I'm working on adding a conversion of metrics to keras metrics.\r\nTo achieve this we need two things:\r\n- have each metric return dictionaries of string -> floats since each keras metrics should return one float\r\n- define in the metric info the different fields of the output dictionary\r\n\r\nIn this PR I'm adding these two features.\r\nI also fixed a few bugs in some metrics\r\n\r\n#867 needs to be merged first","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/868\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/868\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/867","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/867\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/867\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/867\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/867","id":745773955,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIzMjc4MjI4","number":867,"title":"Fix some metrics feature types","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-18T15:46:11Z","updated_at":"2020-11-19T17:35:58Z","closed_at":"2020-11-19T17:35:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/867","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/867","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/867.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/867.patch","merged_at":"2020-11-19T17:35:57Z"},"body":"Replace `int` feature type to `int32` since `int` is not a pyarrow dtype in those metrics:\r\n- accuracy\r\n- precision\r\n- recall\r\n- f1\r\nI also added the sklearn citation and used keyword arguments to remove future warnings","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/867\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/867\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/866","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/866\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/866\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/866\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/866","id":745719222,"node_id":"MDU6SXNzdWU3NDU3MTkyMjI=","number":866,"title":"OSCAR from Inria 
group","user":{"login":"jchwenger","id":34098722,"node_id":"MDQ6VXNlcjM0MDk4NzIy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34098722?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jchwenger","html_url":"https:\/\/github.com\/jchwenger","followers_url":"https:\/\/api.github.com\/users\/jchwenger\/followers","following_url":"https:\/\/api.github.com\/users\/jchwenger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jchwenger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jchwenger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jchwenger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jchwenger\/orgs","repos_url":"https:\/\/api.github.com\/users\/jchwenger\/repos","events_url":"https:\/\/api.github.com\/users\/jchwenger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jchwenger\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-18T14:40:54Z","updated_at":"2020-11-18T15:01:30Z","closed_at":"2020-11-18T15:01:30Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *OSCAR* (Open Super-large Crawled ALMAnaCH coRpus), multilingual parsing of Common Crawl (separate crawls for many different languages), [here](https:\/\/oscar-corpus.com\/).\r\n- **Description:** *OSCAR or Open Super-large Crawled ALMAnaCH coRpus is a huge multilingual corpus obtained by language classification and filtering of the Common Crawl corpus using the goclassy architecture.*\r\n- **Paper:** *[here](https:\/\/hal.inria.fr\/hal-02148693)*\r\n- **Data:** *[here](https:\/\/oscar-corpus.com\/)*\r\n- **Motivation:** *useful for unsupervised tasks in separate languages. 
In an ideal world, your team would be able to obtain the unshuffled version, that could be used to train GPT-2-like models (the shuffled version, I suppose, could be used for translation).*\r\n\r\nI am aware that you do offer the \"colossal\" Common Crawl dataset already, but this has the advantage to be available in many subcorpora for different languages.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/866\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/866\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/865","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/865\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/865\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/865\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/865","id":745430497,"node_id":"MDU6SXNzdWU3NDU0MzA0OTc=","number":865,"title":"Have Trouble importing `datasets`","user":{"login":"forest1988","id":2755894,"node_id":"MDQ6VXNlcjI3NTU4OTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2755894?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/forest1988","html_url":"https:\/\/github.com\/forest1988","followers_url":"https:\/\/api.github.com\/users\/forest1988\/followers","following_url":"https:\/\/api.github.com\/users\/forest1988\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/forest1988\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/forest1988\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/forest1988\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/forest1988\/orgs","repos_url":"https:\/\/api.github.com\/users\/forest1988\/repos","events_url":"https:\/\/api.github.com\/users\/forest1988\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/forest1988\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-18T08:04:41Z","updated_at":"2020-11-18T08:16:35Z","closed_at":"2020-11-18T08:16:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm failing to import transformers (v4.0.0-dev), and tracing the cause seems to be failing to import datasets.\r\n\r\nI cloned the newest version of datasets (master branch), and do `pip install -e .`.\r\n\r\nThen, `import datasets` causes the error below.\r\n\r\n```\r\n~\/workspace\/Clone\/datasets\/src\/datasets\/utils\/file_utils.py in \r\n 116 sys.path.append(str(HF_MODULES_CACHE))\r\n 117 \r\n--> 118 os.makedirs(HF_MODULES_CACHE, exist_ok=True)\r\n 119 if not os.path.exists(os.path.join(HF_MODULES_CACHE, \"__init__.py\")):\r\n 120 with open(os.path.join(HF_MODULES_CACHE, \"__init__.py\"), \"w\"):\r\n\r\n~\/.pyenv\/versions\/anaconda3-2020.07\/lib\/python3.8\/os.py in makedirs(name, mode, exist_ok)\r\n 221 return\r\n 222 try:\r\n--> 223 mkdir(name, mode)\r\n 224 except OSError:\r\n 225 # Cannot rely on checking for EEXIST, since the operating system \r\n\r\nFileNotFoundError: [Errno 2] No such file or directory: 
'\/.cache\/huggingface\/modules'\r\n```\r\n\r\nThe error occurs in `os.makedirs` in `file_utils.py`, even though `exist_ok = True` option is set.\r\n(I use Python 3.8, so `exist_ok` is expected to work.)\r\n\r\nI've checked some environment variables, and they are set as below.\r\n\r\n```\r\n*** NameError: name 'HF_MODULES_CACHE' is not defined\r\n*** NameError: name 'hf_cache_home' is not defined\r\n*** NameError: name 'XDG_CACHE_HOME' is not defined\r\n```\r\n\r\nShould I set some environment variables before using this library?\r\nAnd, do you have any idea why \"No such file or directory\" occurs even though the `exist_ok = True` option is set?\r\n\r\nThank you in advance.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/865\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/865\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/864","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/864\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/864\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/864\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/864","id":745322357,"node_id":"MDU6SXNzdWU3NDUzMjIzNTc=","number":864,"title":"Unable to download cnn_dailymail dataset","user":{"login":"rohitashwa1907","id":46031058,"node_id":"MDQ6VXNlcjQ2MDMxMDU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46031058?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rohitashwa1907","html_url":"https:\/\/github.com\/rohitashwa1907","followers_url":"https:\/\/api.github.com\/users\/rohitashwa1907\/followers","following_url":"https:\/\/api.github.com\/users\/rohitashwa1907\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rohitashwa1907\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rohitashwa1907\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rohitashwa1907\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rohitashwa1907\/orgs","repos_url":"https:\/\/api.github.com\/users\/rohitashwa1907\/repos","events_url":"https:\/\/api.github.com\/users\/rohitashwa1907\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rohitashwa1907\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2020-11-18T04:38:02Z","updated_at":"2020-11-20T05:22:11Z","closed_at":"2020-11-20T05:22:10Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"### Script to reproduce the error\r\n```\r\nfrom datasets import load_dataset\r\n\r\ntrain_dataset = load_dataset(\"cnn_dailymail\", \"3.0.0\", split= 'train[:10%')\r\nvalid_dataset = load_dataset(\"cnn_dailymail\",\"3.0.0\", split=\"validation[:5%]\")\r\n```\r\n\r\n\r\n### Error\r\n```\r\n---------------------------------------------------------------------------\r\nNotADirectoryError Traceback (most recent call last)\r\n in ()\r\n 1 from datasets import load_dataset\r\n 2 \r\n----> 3 train_dataset = load_dataset(\"cnn_dailymail\", \"3.0.0\", split= 'train[:10%')\r\n 4 valid_dataset = load_dataset(\"cnn_dailymail\",\"3.0.0\", split=\"validation[:5%]\")\r\n\r\n5 frames\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, save_infos, script_version, **config_kwargs)\r\n 609 download_config=download_config,\r\n 610 download_mode=download_mode,\r\n--> 611 ignore_verifications=ignore_verifications,\r\n 612 )\r\n 613 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n 469 if not downloaded_from_gcs:\r\n 470 
self._download_and_prepare(\r\n--> 471 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 472 )\r\n 473 # Sync info\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 524 split_dict = SplitDict(dataset_name=self.name)\r\n 525 split_generators_kwargs = self._make_split_generators_kwargs(prepare_split_kwargs)\r\n--> 526 split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n 527 \r\n 528 # Checksums verification\r\n\r\n\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/cnn_dailymail\/0128610a44e10f25b4af6689441c72af86205282d26399642f7db38fa7535602\/cnn_dailymail.py in _split_generators(self, dl_manager)\r\n 252 def _split_generators(self, dl_manager):\r\n 253 dl_paths = dl_manager.download_and_extract(_DL_URLS)\r\n--> 254 train_files = _subset_filenames(dl_paths, datasets.Split.TRAIN)\r\n 255 # Generate shared vocabulary\r\n 256 \r\n\r\n\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/cnn_dailymail\/0128610a44e10f25b4af6689441c72af86205282d26399642f7db38fa7535602\/cnn_dailymail.py in _subset_filenames(dl_paths, split)\r\n 153 else:\r\n 154 logging.fatal(\"Unsupported split: %s\", split)\r\n--> 155 cnn = _find_files(dl_paths, \"cnn\", urls)\r\n 156 dm = _find_files(dl_paths, \"dm\", urls)\r\n 157 return cnn + dm\r\n\r\n\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/cnn_dailymail\/0128610a44e10f25b4af6689441c72af86205282d26399642f7db38fa7535602\/cnn_dailymail.py in _find_files(dl_paths, publisher, url_dict)\r\n 132 else:\r\n 133 logging.fatal(\"Unsupported publisher: %s\", publisher)\r\n--> 134 files = sorted(os.listdir(top_dir))\r\n 135 \r\n 136 ret_files = []\r\n\r\nNotADirectoryError: [Errno 20] Not a directory: '\/root\/.cache\/huggingface\/datasets\/downloads\/1bc05d24fa6dda2468e83a73cf6dc207226e01e3c48a507ea716dc0421da583b\/cnn\/stories'\r\n\r\n```\r\n\r\nThanks for any suggestions.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/864\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/864\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/863","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/863\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/863\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/863\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/863","id":744954534,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIyNTk0Mjg1","number":863,"title":"Add clear_cache parameter in the test 
command","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-17T17:52:29Z","updated_at":"2020-11-18T14:44:25Z","closed_at":"2020-11-18T14:44:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/863","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/863","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/863.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/863.patch","merged_at":"2020-11-18T14:44:24Z"},"body":"For certain datasets like OSCAR #348 there are lots of different configurations and each one of them can take a lot of disk space.\r\n\r\nI added a `--clear_cache` flag to the `datasets-cli test` command to be able to clear the cache after each configuration test to avoid filling up the disk. 
It should enable an easier generation for the `dataset_infos.json` file for OSCAR.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/863\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/863\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/862","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/862\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/862\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/862\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/862","id":744906131,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIyNTUzMzY1","number":862,"title":"Update head requests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-17T16:49:06Z","updated_at":"2020-11-18T14:43:53Z","closed_at":"2020-11-18T14:43:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/862","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/862","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/862.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/862.patch","merged_at":"2020-11-18T14:43:50Z"},"body":"Get requests and Head requests didn't have the same parameters.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/862\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/862\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/861","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/861\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/861\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/861\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/861","id":744753458,"node_id":"MDU6SXNzdWU3NDQ3NTM0NTg=","number":861,"title":"Possible Bug: Small training\/dataset file creates gigantic output","user":{"login":"NebelAI","id":7240417,"node_id":"MDQ6VXNlcjcyNDA0MTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7240417?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NebelAI","html_url":"https:\/\/github.com\/NebelAI","followers_url":"https:\/\/api.github.com\/users\/NebelAI\/followers","following_url":"https:\/\/api.github.com\/users\/NebelAI\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NebelAI\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NebelAI\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NebelAI\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NebelAI\/orgs","repos_url":"https:\/\/api.github.com\/users\/NebelAI\/repos","events_url":"https:\/\/api.github.com\/users\/NebelAI\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NebelAI\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is 
requested"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2020-11-17T13:48:59Z","updated_at":"2021-03-30T14:04:04Z","closed_at":"2021-03-22T12:04:55Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hey guys,\r\n\r\nI was trying to create a new bert model from scratch via _huggingface transformers + tokenizers + dataets_ (actually using this example script by your team: https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/language-modeling\/run_mlm.py). It was supposed to be a first test with a small 5 GB raw text file but I can't even end the preprocessing handled by datasets because this tiny 5 GB text file becomes more than 1 TB when processing. My system was running out of space and crashed prematurely.\r\n\r\nI've done training from scratch via Google's bert repo in the past and I can remember that the resulting pretraining data can become quite big. But 5 GB becoming 1 TB was never the case. 
Is this considered normal or is it a bug?\r\n\r\nI've used the following CMD:\r\n`python xla_spawn.py --num_cores=8 run_mlm.py --model_type bert --config_name config.json --tokenizer_name tokenizer.json --train_file dataset_full.txt --do_train --output_dir out --max_steps 500000 --save_steps 2500 --save_total_limit 2 --prediction_loss_only --line_by_line --max_seq_length 128 --pad_to_max_length --preprocessing_num_workers 16 --per_device_train_batch_size 128 --overwrite_output_dir --debug`\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/861\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/861\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/860","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/860\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/860\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/860\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/860","id":744750691,"node_id":"MDU6SXNzdWU3NDQ3NTA2OTE=","number":860,"title":"wmt16 cs-en does not donwload ","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-17T13:45:35Z","updated_at":"2020-11-27T13:47:04Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI am trying with wmt16, cs-en pair, thanks for the help, perhaps similar to the ro-en issue. 
thanks\r\n\r\n split=\"train\", n_obs=data_args.n_train) for task in data_args.task}\r\n File \"finetune_t5_trainer.py\", line 109, in \r\n split=\"train\", n_obs=data_args.n_train) for task in data_args.task}\r\n File \"\/home\/rabeeh\/internship\/seq2seq\/tasks\/tasks.py\", line 82, in get_dataset\r\n dataset = load_dataset(\"wmt16\", self.pair, split=split)\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 531, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/rabeeh\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wmt16\/7b2c4443a7d34c2e13df267eaa8cab4c62dd82f6b62b0d9ecc2e3a673ce17308\/wmt_utils.py\", line 755, in _split_generators\r\n downloaded_files = dl_manager.download_and_extract(urls_to_download)\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 254, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 179, in download\r\n num_proc=download_config.num_proc,\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in \r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 181, in _single_map_nested\r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 181, in \r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 163, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/opt\/conda\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 475, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/www.statmt.org\/wmt13\/training-parallel-commoncrawl.tgz","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/860\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/860\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/859","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/859\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/859\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/859\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/859","id":743917091,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIxNzI4MDM4","number":859,"title":"Integrate file_lock inside the lib for better logging control","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-16T15:13:39Z","updated_at":"2020-11-16T17:06:44Z","closed_at":"2020-11-16T17:06:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/859","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/859","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/859.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/859.patch","merged_at":"2020-11-16T17:06:42Z"},"body":"Previously the locking system of the lib was based on the file_lock package. 
However as noticed in #812 there were too many logs printed even when the datasets logging was set to warnings or errors.\r\n\r\nFor example\r\n```python\r\nimport logging\r\nlogging.basicConfig(level=logging.INFO)\r\n\r\nimport datasets\r\ndatasets.set_verbosity_warning()\r\ndatasets.load_dataset(\"squad\")\r\n```\r\nwould still log the file lock events:\r\n```\r\nINFO:filelock:Lock 5737989232 acquired on \/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/44801f118d500eff6114bfc56ab4e6def941f1eb14b70ac1ecc052e15cdac49d.85f43de978b9b25921cb78d7a2f2b350c04acdbaedb9ecb5f7101cd7c0950e68.py.lock\r\nINFO:filelock:Lock 5737989232 released on \/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/44801f118d500eff6114bfc56ab4e6def941f1eb14b70ac1ecc052e15cdac49d.85f43de978b9b25921cb78d7a2f2b350c04acdbaedb9ecb5f7101cd7c0950e68.py.lock\r\nINFO:filelock:Lock 4393489968 acquired on \/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/_Users_quentinlhoest_.cache_huggingface_datasets_squad_plain_text_1.0.0_1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41.lock\r\nINFO:filelock:Lock 4393489968 released on \/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/_Users_quentinlhoest_.cache_huggingface_datasets_squad_plain_text_1.0.0_1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41.lock\r\nINFO:filelock:Lock 4393490808 acquired on \/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/_Users_quentinlhoest_.cache_huggingface_datasets_squad_plain_text_1.0.0_1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41.lock\r\nReusing dataset squad (\/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/squad\/plain_text\/1.0.0\/1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41)\r\nINFO:filelock:Lock 4393490808 released on \/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/_Users_quentinlhoest_.cache_huggingface_datasets_squad_plain_text_1.0.0_1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41.lock\r\n```\r\n\r\nWith the integration of file_lock in the library, the ouput is much cleaner:\r\n```\r\nReusing dataset squad (\/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/squad\/plain_text\/1.0.0\/1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41)\r\n```\r\n\r\nSince the file_lock package is only a 450 lines file I think it's fine to have it inside the lib.\r\n\r\nFix #812 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/859\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/859\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/858","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/858\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/858\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/858\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/858","id":743904516,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIxNzE3ODQ4","number":858,"title":"Add SemEval-2010 task 
8","user":{"login":"JoelNiklaus","id":3775944,"node_id":"MDQ6VXNlcjM3NzU5NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3775944?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JoelNiklaus","html_url":"https:\/\/github.com\/JoelNiklaus","followers_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/followers","following_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/orgs","repos_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/repos","events_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JoelNiklaus\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-16T14:57:57Z","updated_at":"2020-11-26T17:28:55Z","closed_at":"2020-11-26T17:28:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/858","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/858","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/858.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/858.patch","merged_at":"2020-11-26T17:28:55Z"},"body":"Hi,\r\nI don't know how to add dummy data, since I create the validation set out of the last 1000 examples of the train set. If you have a suggestion, I am happy to implement it.\r\nCheers,\r\nJoel","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/858\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/858\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/857","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/857\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/857\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/857\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/857","id":743863214,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIxNjg0ODIx","number":857,"title":"Use pandas reader in 
csv","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-16T14:05:45Z","updated_at":"2020-11-19T17:35:40Z","closed_at":"2020-11-19T17:35:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/857","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/857","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/857.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/857.patch","merged_at":"2020-11-19T17:35:38Z"},"body":"The pyarrow CSV reader has issues that the pandas one doesn't (see #836 ).\r\nTo fix that I switched to the pandas csv reader.\r\nThe new reader is compatible with all the pandas parameters to read csv files.\r\nMoreover it reads csv by chunk in order to save RAM, while the pyarrow one loads everything in memory.\r\n\r\nFix #836 \r\nFix #794 \r\n\r\nBreaking: now all the parameters to read to csv file can be used in the `load_dataset` kwargs when loading csv, and the previous pyarrow objects `pyarrow.csv.ReadOptions`, `pyarrow.csv.ParseOptions` and `pyarrow.csv.ConvertOptions` are not used anymore.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/857\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/857\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/856","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/856\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/856\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/856\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/856","id":743799239,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIxNjMzNTYz","number":856,"title":"Add open book 
corpus","user":{"login":"vblagoje","id":458335,"node_id":"MDQ6VXNlcjQ1ODMzNQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/458335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vblagoje","html_url":"https:\/\/github.com\/vblagoje","followers_url":"https:\/\/api.github.com\/users\/vblagoje\/followers","following_url":"https:\/\/api.github.com\/users\/vblagoje\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vblagoje\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vblagoje\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vblagoje\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vblagoje\/orgs","repos_url":"https:\/\/api.github.com\/users\/vblagoje\/repos","events_url":"https:\/\/api.github.com\/users\/vblagoje\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vblagoje\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":15,"created_at":"2020-11-16T12:30:02Z","updated_at":"2020-11-18T12:03:46Z","closed_at":"2020-11-17T15:22:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/856","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/856","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/856.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/856.patch","merged_at":"2020-11-17T15:22:17Z"},"body":"Adds book corpus based on Shawn Presser's [work](https:\/\/github.com\/soskek\/bookcorpus\/issues\/27) @richarddwang, the author of the original BookCorpus dataset, suggested it should be named [OpenBookCorpus](https:\/\/github.com\/huggingface\/datasets\/issues\/486). I named it BookCorpusOpen to be easily located alphabetically. But, of course, we can rename it if needed. \r\n\r\nIt contains 17868 dataset items; each item contains two fields: title and text. The title is the name of the book (just the file name) while the text contains unprocessed book text. Note that bookcorpus is pre-segmented into a sentence while this bookcorpus is not. This is intentional (see https:\/\/github.com\/huggingface\/datasets\/issues\/486) as some users might want to further process the text themselves. \r\n\r\n@lhoestq and others please review this PR thoroughly. 
cc @shawwn ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/856\/reactions","total_count":5,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":3,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/856\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/855","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/855\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/855\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/855\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/855","id":743690839,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIxNTQ2Njkx","number":855,"title":"Fix kor nli csv reader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-16T09:53:41Z","updated_at":"2020-11-16T13:59:14Z","closed_at":"2020-11-16T13:59:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/855","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/855","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/855.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/855.patch","merged_at":"2020-11-16T13:59:12Z"},"body":"The kor_nli dataset had an issue with the csv reader that was not able to parse the lines correctly. Some lines were merged together for some reason.\r\nI fixed that by iterating through the lines directly instead of using a csv reader.\r\nI also changed the feature names to match the other NLI datasets (i.e. 
use \"premise\", \"hypothesis\", \"label\" features)\r\n\r\nFix #821 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/855\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/855\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/854","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/854\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/854\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/854\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/854","id":743675376,"node_id":"MDU6SXNzdWU3NDM2NzUzNzY=","number":854,"title":"wmt16 does not download ","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2020-11-16T09:31:51Z","updated_at":"2021-02-25T03:15:09Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I appreciate your help with the following error, thanks \r\n\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset(\"wmt16\", \"ro-en\", split=\"train\")\r\nDownloading and preparing dataset wmt16\/ro-en (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/root\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/7b2c4443a7d34c2e13df267eaa8cab4c62dd82f6b62b0d9ecc2e3a673ce17308...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 531, in 
_download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wmt16\/7b2c4443a7d34c2e13df267eaa8cab4c62dd82f6b62b0d9ecc2e3a673ce17308\/wmt_utils.py\", line 755, in _split_generators\r\n downloaded_files = dl_manager.download_and_extract(urls_to_download)\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/download_manager.py\", line 254, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/download_manager.py\", line 179, in download\r\n num_proc=download_config.num_proc,\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in \r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 181, in _single_map_nested\r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 181, in \r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 163, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 475, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/opus.nlpl.eu\/download.php?f=SETIMES\/v2\/tmx\/en-ro.tmx.gz","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/854\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/854\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/853","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/853\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/853\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/853\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/853","id":743426583,"node_id":"MDU6SXNzdWU3NDM0MjY1ODM=","number":853,"title":"concatenate_datasets support axis=0 or 1 
\uff1f","user":{"login":"renqingcolin","id":12437751,"node_id":"MDQ6VXNlcjEyNDM3NzUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12437751?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/renqingcolin","html_url":"https:\/\/github.com\/renqingcolin","followers_url":"https:\/\/api.github.com\/users\/renqingcolin\/followers","following_url":"https:\/\/api.github.com\/users\/renqingcolin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/renqingcolin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/renqingcolin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/renqingcolin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/renqingcolin\/orgs","repos_url":"https:\/\/api.github.com\/users\/renqingcolin\/repos","events_url":"https:\/\/api.github.com\/users\/renqingcolin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/renqingcolin\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892884,"node_id":"MDU6TGFiZWwxOTM1ODkyODg0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/help%20wanted","name":"help wanted","color":"008672","default":true,"description":"Extra attention is needed"},{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organ
izations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":10,"created_at":"2020-11-16T02:46:23Z","updated_at":"2021-04-19T16:07:18Z","closed_at":"2021-04-19T16:07:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I want to achieve the following result\r\n![image](https:\/\/user-images.githubusercontent.com\/12437751\/99207426-f0c8db80-27f8-11eb-820a-4d9f7287b742.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/853\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/853\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/852","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/852\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/852\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/852\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/852","id":743396240,"node_id":"MDU6SXNzdWU3NDMzOTYyNDA=","number":852,"title":"wmt cannot be downloaded ","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-16T01:04:41Z","updated_at":"2020-11-16T09:31:58Z","closed_at":"2020-11-16T09:31:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I appreciate your help with the following error, thanks \r\n\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset(\"wmt16\", \"ro-en\", split=\"train\")\r\nDownloading and preparing dataset wmt16\/ro-en (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to 
\/root\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/7b2c4443a7d34c2e13df267eaa8cab4c62dd82f6b62b0d9ecc2e3a673ce17308...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 531, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wmt16\/7b2c4443a7d34c2e13df267eaa8cab4c62dd82f6b62b0d9ecc2e3a673ce17308\/wmt_utils.py\", line 755, in _split_generators\r\n downloaded_files = dl_manager.download_and_extract(urls_to_download)\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/download_manager.py\", line 254, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/download_manager.py\", line 179, in download\r\n num_proc=download_config.num_proc,\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in \r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 181, in _single_map_nested\r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 181, in \r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 163, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 475, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/opus.nlpl.eu\/download.php?f=SETIMES\/v2\/tmx\/en-ro.tmx.gz","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/852\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/852\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/851","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/851\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/851\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/851\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/851","id":743343278,"node_id":"MDU6SXNzdWU3NDMzNDMyNzg=","number":851,"title":"Add support for other languages for rouge","user":{"login":"alexyalunin","id":23011284,"node_id":"MDQ6VXNlcjIzMDExMjg0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23011284?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexyalunin","html_url":"https:\/\/github.com\/alexyalunin","followers_url":"https:\/\/api.github.com\/users\/alexyalunin\/followers","following_url":"https:\/\/api.github.com\/users\/alexyalunin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexyalunin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexyalunin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexyalunin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexyalunin\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexyalunin\/repos","events_url":"https:\/\/api.github.com\/users\/alexyalunin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexyalunin\/received_events","type":"User","site_admin":false},"labels":[{"id":2067400959,"node_id":"MDU6TGFiZWwyMDY3NDAwOTU5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Metric%20discussion","name":"Metric discussion","color":"d722e8","default":false,"description":"Discussions on the metrics"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-15T20:57:45Z","updated_at":"2021-06-06T09:07:52Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I calculate rouge with\r\n```\r\nfrom datasets import load_metric\r\nrouge = load_metric(\"rouge\")\r\nrouge_output = rouge.compute(predictions=['\u0442\u0435\u0441\u0442 \u0442\u0435\u0441\u0442 \u043f\u0440\u0438\u0432\u0435\u0442'], references=['\u0442\u0435\u0441\u0442 \u0442\u0435\u0441\u0442 \u043f\u043e\u043a\u0430'], rouge_types=[\r\n \"rouge2\"])[\"rouge2\"].mid\r\nprint(rouge_output)\r\n```\r\nthe result is\r\n`Score(precision=0.0, recall=0.0, fmeasure=0.0)`\r\nIt seems like the `rouge_score` library that this metric uses filters all non-alphanueric latin characters \r\nin `rouge_scorer\/tokenize.py` with `text = re.sub(r\"[^a-z0-9]+\", \" \", six.ensure_str(text))`.\r\nPlease add support for other languages. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/851\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/851\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/850","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/850\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/850\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/850\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/850","id":742369419,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIwNTE0MDY3","number":850,"title":"Create ClassLabel for labelling tasks datasets","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-13T11:07:22Z","updated_at":"2020-11-16T10:32:05Z","closed_at":"2020-11-16T10:31:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/850","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/850","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/850.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/850.patch","merged_at":"2020-11-16T10:31:58Z"},"body":"This PR adds a specific `ClassLabel` for the datasets that are about a labelling task such as POS, NER or Chunking.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/850\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/850\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/849","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/849\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/849\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/849\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/849","id":742263333,"node_id":"MDU6SXNzdWU3NDIyNjMzMzM=","number":849,"title":"Load amazon 
dataset","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-13T08:34:24Z","updated_at":"2020-11-17T07:22:59Z","closed_at":"2020-11-17T07:22:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI was going through amazon_us_reviews dataset and found that example API usage given on website is different from the API usage while loading dataset. \r\n\r\nEg. what API usage is on the [website](https:\/\/huggingface.co\/datasets\/amazon_us_reviews) \r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"amazon_us_reviews\")\r\n```\r\nHow it is when I tried (the error generated does point me to the right direction though)\r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"amazon_us_reviews\", 'Books_v1_00')\r\n``` \r\nAlso, there is some issue with formatting as it's not showing bullet list in description with new line. 
Can I work on it?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/849\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/849\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/848","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/848\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/848\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/848\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/848","id":742240942,"node_id":"MDU6SXNzdWU3NDIyNDA5NDI=","number":848,"title":"Error when concatenate_datasets","user":{"login":"shexuan","id":25664170,"node_id":"MDQ6VXNlcjI1NjY0MTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25664170?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shexuan","html_url":"https:\/\/github.com\/shexuan","followers_url":"https:\/\/api.github.com\/users\/shexuan\/followers","following_url":"https:\/\/api.github.com\/users\/shexuan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shexuan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shexuan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shexuan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shexuan\/orgs","repos_url":"https:\/\/api.github.com\/users\/shexuan\/repos","events_url":"https:\/\/api.github.com\/users\/shexuan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shexuan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-11-13T07:56:02Z","updated_at":"2020-11-13T17:40:59Z","closed_at":"2020-11-13T15:55:10Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello, when I concatenate two dataset loading from disk, I encountered a problem:\r\n```\r\ntest_dataset = load_from_disk('data\/test_dataset')\r\ntrn_dataset = load_from_disk('data\/train_dataset')\r\n\r\ntrain_dataset = concatenate_datasets([trn_dataset, test_dataset])\r\n```\r\nAnd it reported ValueError blow:\r\n```\r\n---------------------------------------------------------------------------\r\nValueError Traceback (most recent call last)\r\n in \r\n----> 1 train_dataset = concatenate_datasets([trn_dataset, test_dataset])\r\n\r\n\/opt\/miniconda3\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in concatenate_datasets(dsets, info, split)\r\n 2547 \"However datasets' indices {} come from memory and datasets' indices {} come from disk.\".format(\r\n 2548 [i for i in range(len(dsets)) if indices_mappings_in_memory[i]],\r\n-> 2549 [i for i in range(len(dsets)) if not indices_mappings_in_memory[i]],\r\n 2550 )\r\n 2551 )\r\n\r\nValueError: Datasets' indices should ALL come from memory, or should ALL come from disk.\r\nHowever datasets' indices [1] come from memory and datasets' indices [0] come from disk.\r\n```\r\n\r\nBut it's curious both of my datasets loading from disk, so I check the source code in `arrow_dataset.py` about the Error:\r\n```\r\ntrn_dataset._data_files\r\n# 
output\r\n[{'filename': 'data\/train_dataset\/csv-train.arrow', 'skip': 0, 'take': 593264}]\r\n\r\ntest_dataset._data_files\r\n# output\r\n[{'filename': 'data\/test_dataset\/csv-test.arrow', 'skip': 0, 'take': 424383}]\r\n\r\nprint([not dset._data_files for dset in [trn_dataset, test_dataset]])\r\n# [False, False]\r\n\r\n# And I tested the code the same as arrow_dataset, but nothing happened\r\ndsets = [trn_dataset, test_dataset]\r\ndsets_in_memory = [not dset._data_files for dset in dsets]\r\nif any(dset_in_memory != dsets_in_memory[0] for dset_in_memory in dsets_in_memory):\r\n raise ValueError(\r\n \"Datasets should ALL come from memory, or should ALL come from disk.\\n\"\r\n \"However datasets {} come from memory and datasets {} come from disk.\".format(\r\n [i for i in range(len(dsets)) if dsets_in_memory[i]],\r\n [i for i in range(len(dsets)) if not dsets_in_memory[i]],\r\n )\r\n )\r\n```\r\n\r\nAny suggestions would be greatly appreciated! \r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/848\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/848\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/847","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/847\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/847\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/847\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/847","id":742179495,"node_id":"MDU6SXNzdWU3NDIxNzk0OTU=","number":847,"title":"multiprocessing in dataset map \"can only test a child process\"","user":{"login":"timothyjlaurent","id":2000204,"node_id":"MDQ6VXNlcjIwMDAyMDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2000204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timothyjlaurent","html_url":"https:\/\/github.com\/timothyjlaurent","followers_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/followers","following_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/orgs","repos_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/repos","events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-11-13T06:01:04Z","updated_at":"2021-02-01T16:53:28Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Using a dataset with a single 'text' field and a fast tokenizer in a jupyter notebook.\r\n\r\n``` \r\ndef tokenizer_fn(example):\r\n return tokenizer.batch_encode_plus(example['text'])\r\n\r\nds_tokenized = text_dataset.map(tokenizer_fn, batched=True, num_proc=6, 
remove_columns=['text'])\r\n```\r\n\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nRemoteTraceback Traceback (most recent call last)\r\nRemoteTraceback: \r\n\"\"\"\r\nTraceback (most recent call last):\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/multiprocess\/pool.py\", line 119, in worker\r\n result = (True, func(*args, **kwds))\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py\", line 156, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/datasets\/fingerprint.py\", line 163, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py\", line 1510, in _map_single\r\n for i in pbar:\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/tqdm\/notebook.py\", line 228, in __iter__\r\n for obj in super(tqdm_notebook, self).__iter__(*args, **kwargs):\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/tqdm\/std.py\", line 1186, in __iter__\r\n self.close()\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/tqdm\/notebook.py\", line 251, in close\r\n super(tqdm_notebook, self).close(*args, **kwargs)\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/tqdm\/std.py\", line 1291, in close\r\n fp_write('')\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/tqdm\/std.py\", line 1288, in fp_write\r\n self.fp.write(_unicode(s))\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/wandb\/sdk\/lib\/redirect.py\", line 91, in new_write\r\n cb(name, data)\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/wandb\/sdk\/wandb_run.py\", line 598, in _console_callback\r\n self._backend.interface.publish_output(name, data)\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/wandb\/sdk\/interface\/interface.py\", line 146, in publish_output\r\n self._publish_output(o)\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/wandb\/sdk\/interface\/interface.py\", line 151, in _publish_output\r\n self._publish(rec)\r\n File \"\/home\/jovyan\/share\/users\/tlaurent\/invitae-bert\/ve\/lib\/python3.6\/site-packages\/wandb\/sdk\/interface\/interface.py\", line 431, in _publish\r\n if self._process and not self._process.is_alive():\r\n File \"\/usr\/lib\/python3.6\/multiprocessing\/process.py\", line 134, in is_alive\r\n assert self._parent_pid == os.getpid(), 'can only test a child process'\r\nAssertionError: can only test a child process\r\n\"\"\"\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/847\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/847\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/846","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/846\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/846\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/846\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/846","id":741885174,"node_id":"MDU6SXNzdWU3NDE4ODUxNzQ=","number":846,"title":"Add HoVer multi-hop fact verification dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-12T19:55:46Z","updated_at":"2020-12-10T21:47:33Z","closed_at":"2020-12-10T21:47:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** HoVer\r\n- **Description:** https:\/\/twitter.com\/YichenJiang9\/status\/1326954363806429186 contains 20K claim verification examples\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2011.03088\r\n- **Data:** https:\/\/hover-nlp.github.io\/\r\n- **Motivation:** There are still few multi-hop information extraction benchmarks (HotpotQA, which dataset wase based off, notwithstanding)\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/846\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/846\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/845","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/845\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/845\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/845\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/845","id":741841350,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIwMDg1NDMy","number":845,"title":"amazon description fields as bullets","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-12T18:50:41Z","updated_at":"2020-11-12T18:50:54Z","closed_at":"2020-11-12T18:50:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/845","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/845","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/845.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/845.patch","merged_at":"2020-11-12T18:50:54Z"},"body":"One more minor formatting change to amazon reviews's description (in addition to #844). 
Just reformatting the fields to display as a bulleted list in markdown.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/845\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/845\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/844","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/844\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/844\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/844\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/844","id":741835661,"node_id":"MDExOlB1bGxSZXF1ZXN0NTIwMDgwNzM5","number":844,"title":"add newlines to amazon desc","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-12T18:41:20Z","updated_at":"2020-11-12T18:42:25Z","closed_at":"2020-11-12T18:42:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/844","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/844","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/844.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/844.patch","merged_at":"2020-11-12T18:42:21Z"},"body":"Just a quick formatting fix to hopefully make it render nicer on Viewer","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/844\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/844\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/843","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/843\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/843\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/843\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/843","id":741531121,"node_id":"MDU6SXNzdWU3NDE1MzExMjE=","number":843,"title":"use_custom_baseline still produces errors for bertscore","user":{"login":"penatbater","id":37921244,"node_id":"MDQ6VXNlcjM3OTIxMjQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37921244?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/penatbater","html_url":"https:\/\/github.com\/penatbater","followers_url":"https:\/\/api.github.com\/users\/penatbater\/followers","following_url":"https:\/\/api.github.com\/users\/penatbater\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/penatbater\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/penatbater\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/penatbater\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/penatbater\/orgs","repos_url":"https:\/\/api.github.com\/users\/penatbater\/repos","events_url":"https:\/\/api.github.com\/users\/penatbater\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/penatbater\/received_events","type":"User","site_admin":false},"labels":[{"id":2067393914,"node_id":"MDU6TGFiZWwyMDY3MzkzOTE0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20bug","name":"metric bug","color":"25b21e","default":false,"description":"A bug in a metric script"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-11-12T11:44:32Z","updated_at":"2021-08-31T10:06:44Z","closed_at":"2021-02-09T14:21:48Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`metric = load_metric('bertscore')`\r\n`a1 = \"random sentences\"`\r\n`b1 = \"random sentences\"`\r\n`metric.compute(predictions = [a1], references = [b1], lang = 'en')`\r\n\r\n`Traceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/stephen_chan\/.local\/lib\/python3.6\/site-packages\/datasets\/metric.py\", line 393, in compute\r\n output = self._compute(predictions=predictions, references=references, **kwargs)\r\n File \"\/home\/stephen_chan\/.cache\/huggingface\/modules\/datasets_modules\/metrics\/bertscore\/361e597a01a41d6cf95d94bbfb01dea16261687abc0c6c74cc9930f80488f363\/bertscore.py\", line 108, in _compute\r\n hashcode = bert_score.utils.get_hash(model_type, num_layers, idf, rescale_with_baseline)\r\nTypeError: get_hash() missing 1 required positional argument: 'use_custom_baseline'`\r\n\r\nAdding 'use_custom_baseline = False' as an argument produces this error\r\n\r\n`Traceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/stephen_chan\/.local\/lib\/python3.6\/site-packages\/datasets\/metric.py\", line 393, in compute\r\n output = self._compute(predictions=predictions, references=references, **kwargs)\r\nTypeError: _compute() got an unexpected keyword argument 'use_custom_baseline'`\r\n\r\nThis is on Ubuntu 18.04, Python 3.6.9, datasets version 
1.1.2","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/843\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/843\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/842","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/842\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/842\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/842\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/842","id":741208428,"node_id":"MDU6SXNzdWU3NDEyMDg0Mjg=","number":842,"title":"How to enable `.map()` pre-processing pipelines to support multi-node parallelism?","user":{"login":"shangw-nvidia","id":66387198,"node_id":"MDQ6VXNlcjY2Mzg3MTk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/66387198?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shangw-nvidia","html_url":"https:\/\/github.com\/shangw-nvidia","followers_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/followers","following_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/orgs","repos_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/repos","events_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-12T02:04:38Z","updated_at":"2020-11-12T23:28:27Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nCurrently, multiprocessing can be enabled for the `.map()` stages on a single node. 
However, in the case of multi-node training, (since more than one node would be available) I'm wondering if it's possible to extend the parallel processing among nodes, instead of only 1 node running the `.map()` while the other node is waiting for it to finish?\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/842\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/842\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/841","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/841\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/841\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/841\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/841","id":740737448,"node_id":"MDU6SXNzdWU3NDA3Mzc0NDg=","number":841,"title":"Can not reuse datasets already downloaded","user":{"login":"jc-hou","id":30210529,"node_id":"MDQ6VXNlcjMwMjEwNTI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30210529?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jc-hou","html_url":"https:\/\/github.com\/jc-hou","followers_url":"https:\/\/api.github.com\/users\/jc-hou\/followers","following_url":"https:\/\/api.github.com\/users\/jc-hou\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jc-hou\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jc-hou\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jc-hou\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jc-hou\/orgs","repos_url":"https:\/\/api.github.com\/users\/jc-hou\/repos","events_url":"https:\/\/api.github.com\/users\/jc-hou\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jc-hou\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-11T12:42:15Z","updated_at":"2020-11-11T18:17:16Z","closed_at":"2020-11-11T18:17:16Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello,\r\nI need to connect to a frontal node (with http proxy, no gpu) before connecting to a gpu node (but no http proxy, so can not use wget so on).\r\nI successfully downloaded and reuse the wikipedia datasets in a frontal node. \r\nWhen I connect to the gpu node, I supposed to use the downloaded datasets from cache, but failed and end with time out error.\r\n\r\nOn frontal node:\r\n```\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset('wikipedia', '20200501.en')\r\nReusing dataset wikipedia (\/linkhome\/rech\/genini01\/uua34ms\/.cache\/huggingface\/datasets\/wikipedia\/20200501.en\/1.0.0\/f92599dfccab29832c442b82870fa8f6983e5b4ebbf5e6e2dcbe894e325339cd)\r\n\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/torch\/cuda\/__init__.py:52: UserWarning: CUDA initialization: Found no NVIDIA driver on your system. 
Please check that you have an NVIDIA GPU and installed a driver from http:\/\/www.nvidia.com\/Download\/index.aspx (Triggered internally at \/pytorch\/c10\/cuda\/CUDAFunctions.cpp:100.)\r\n return torch._C._cuda_getDeviceCount() > 0\r\n```\r\n\r\nOn gpu node:\r\n```\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset('wikipedia', '20200501.en')\r\nTraceback (most recent call last):\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/urllib3\/connection.py\", line 160, in _new_conn\r\n (self._dns_host, self.port), self.timeout, **extra_kw\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/urllib3\/util\/connection.py\", line 84, in create_connection\r\n raise err\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/urllib3\/util\/connection.py\", line 74, in create_connection\r\n sock.connect(sa)\r\nTimeoutError: [Errno 110] Connection timed out\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/urllib3\/connectionpool.py\", line 677, in urlopen\r\n chunked=chunked,\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/urllib3\/connectionpool.py\", line 381, in _make_request\r\n self._validate_conn(conn)\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/urllib3\/connectionpool.py\", line 978, in _validate_conn\r\n conn.connect()\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/urllib3\/connection.py\", line 309, in connect\r\n conn = self._new_conn()\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/urllib3\/connection.py\", line 172, in _new_conn\r\n self, \"Failed to establish a new connection: %s\" % e\r\nurllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 110] Connection timed out\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/requests\/adapters.py\", line 449, in send\r\n timeout=timeout\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/urllib3\/connectionpool.py\", line 727, in urlopen\r\n method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/urllib3\/util\/retry.py\", line 446, in increment\r\n raise MaxRetryError(_pool, url, error or ResponseError(cause))\r\nurllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='s3.amazonaws.com', port=443): Max retries exceeded with url: \/datasets.huggingface.co\/datasets\/datasets\/wikipedia\/wikipedia.py (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 110] Connection timed out',))\r\n\r\nDuring handling of the above exception, another exception 
occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 590, in load_dataset\r\n path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 264, in prepare_module\r\n head_hf_s3(path, filename=name, dataset=dataset)\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 200, in head_hf_s3\r\n return requests.head(hf_bucket_url(identifier=identifier, filename=filename, use_cdn=use_cdn, dataset=dataset))\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/requests\/api.py\", line 104, in head\r\n return request('head', url, **kwargs)\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/requests\/api.py\", line 61, in request\r\n return session.request(method=method, url=url, **kwargs)\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/requests\/sessions.py\", line 530, in request\r\n resp = self.send(prep, **send_kwargs)\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/requests\/sessions.py\", line 643, in send\r\n r = adapter.send(request, **kwargs)\r\n File \"\/linkhome\/rech\/genini01\/uua34ms\/work\/anaconda3\/envs\/pytorch_pip170_cuda102\/lib\/python3.6\/site-packages\/requests\/adapters.py\", line 516, in send\r\n raise ConnectionError(e, request=request)\r\nrequests.exceptions.ConnectionError: HTTPSConnectionPool(host='s3.amazonaws.com', port=443): Max retries exceeded with url: \/datasets.huggingface.co\/datasets\/datasets\/wikipedia\/wikipedia.py (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 110] Connection timed out',))\r\n\r\n```\r\n\r\nAny advice?Thanks!\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/841\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/841\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/840","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/840\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/840\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/840\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/840","id":740632771,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE5MDg2NDUw","number":840,"title":"Update 
squad_v2.py","user":{"login":"Javier-Jimenez99","id":38747614,"node_id":"MDQ6VXNlcjM4NzQ3NjE0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38747614?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Javier-Jimenez99","html_url":"https:\/\/github.com\/Javier-Jimenez99","followers_url":"https:\/\/api.github.com\/users\/Javier-Jimenez99\/followers","following_url":"https:\/\/api.github.com\/users\/Javier-Jimenez99\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Javier-Jimenez99\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Javier-Jimenez99\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Javier-Jimenez99\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Javier-Jimenez99\/orgs","repos_url":"https:\/\/api.github.com\/users\/Javier-Jimenez99\/repos","events_url":"https:\/\/api.github.com\/users\/Javier-Jimenez99\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Javier-Jimenez99\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-11T09:58:41Z","updated_at":"2020-11-11T15:29:34Z","closed_at":"2020-11-11T15:26:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/840","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/840","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/840.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/840.patch","merged_at":"2020-11-11T15:26:35Z"},"body":"Change lines 100 and 102 to prevent overwriting ```predictions``` variable.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/840\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/840\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/839","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/839\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/839\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/839\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/839","id":740355270,"node_id":"MDU6SXNzdWU3NDAzNTUyNzA=","number":839,"title":"XSum dataset missing spaces between 
sentences","user":{"login":"loganlebanoff","id":10007282,"node_id":"MDQ6VXNlcjEwMDA3Mjgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10007282?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/loganlebanoff","html_url":"https:\/\/github.com\/loganlebanoff","followers_url":"https:\/\/api.github.com\/users\/loganlebanoff\/followers","following_url":"https:\/\/api.github.com\/users\/loganlebanoff\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/loganlebanoff\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/loganlebanoff\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/loganlebanoff\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/loganlebanoff\/orgs","repos_url":"https:\/\/api.github.com\/users\/loganlebanoff\/repos","events_url":"https:\/\/api.github.com\/users\/loganlebanoff\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/loganlebanoff\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-11T00:34:43Z","updated_at":"2020-11-11T00:34:43Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I noticed that the XSum dataset has no space between sentences. This could lead to worse results for anyone training or testing on it. Here's an example (0th entry in the test set):\r\n\r\n`The London trio are up for best UK act and best album, as well as getting two nominations in the best song category.\"We got told like this morning 'Oh I think you're nominated'\", said Dappy.\"And I was like 'Oh yeah, which one?' And now we've got nominated for four awards. I mean, wow!\"Bandmate Fazer added: \"We thought it's best of us to come down and mingle with everyone and say hello to the cameras. And now we find we've got four nominations.\"The band have two shots at the best song prize, getting the nod for their Tynchy Stryder collaboration Number One, and single Strong Again.Their album Uncle B will also go up against records by the likes of Beyonce and Kanye West.N-Dubz picked up the best newcomer Mobo in 2007, but female member Tulisa said they wouldn't be too disappointed if they didn't win this time around.\"At the end of the day we're grateful to be where we are in our careers.\"If it don't happen then it don't happen - live to fight another day and keep on making albums and hits for the fans.\"Dappy also revealed they could be performing live several times on the night.The group will be doing Number One and also a possible rendition of the War Child single, I Got Soul.The charity song is a re-working of The Killers' All These Things That I've Done and is set to feature artists like Chipmunk, Ironik and Pixie Lott.This year's Mobos will be held outside of London for the first time, in Glasgow on 30 September.N-Dubz said they were looking forward to performing for their Scottish fans and boasted about their recent shows north of the border.\"We just done Edinburgh the other day,\" said Dappy.\"We smashed up an N-Dubz show over there. We done Aberdeen about three or four months ago - we smashed up that show over there! 
Everywhere we go we smash it up!\"`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/839\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/839\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/838","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/838\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/838\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/838\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/838","id":740328382,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE4ODM0NTE5","number":838,"title":"CNN\/Dailymail Dataset Card","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-10T23:56:43Z","updated_at":"2020-11-25T21:09:51Z","closed_at":"2020-11-25T21:09:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/838","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/838","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/838.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/838.patch","merged_at":"2020-11-25T21:09:50Z"},"body":"Link to the card page: https:\/\/github.com\/mcmillanmajora\/datasets\/tree\/cnn_dailymail_card\/datasets\/cnn_dailymail\n\nOne of the questions this dataset brings up is how we want to handle versioning of the cards to mirror versions of the dataset. The different versions of this dataset are used for different tasks (which may not be reflected in the versions that we currently have in the repo?), but it's only the structure that's changing rather than the content in this particular case, at least between versions 2.0.0 and 3.0.0. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/838\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/838\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/837","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/837\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/837\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/837\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/837","id":740250215,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE4NzcwNDM5","number":837,"title":"AlloCin\u00e9 dataset card","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-10T21:19:53Z","updated_at":"2020-11-25T21:56:27Z","closed_at":"2020-11-25T21:56:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/837","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/837","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/837.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/837.patch","merged_at":"2020-11-25T21:56:27Z"},"body":"Link to the card page: https:\/\/github.com\/mcmillanmajora\/datasets\/blob\/allocine_card\/datasets\/allocine\/README.md\n\nThere wasn't as much information available for this dataset, so I'm wondering what's the best way to address open questions about the dataset. For example, where did the list of films that the dataset creator used come from?\n\nI'm also wondering how best to go about talking about limitations when so little is known about the data. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/837\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/837\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/836","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/836\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/836\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/836\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/836","id":740187613,"node_id":"MDU6SXNzdWU3NDAxODc2MTM=","number":836,"title":"load_dataset with 'csv' is not working. while the same file is loading with 'text' mode or with pandas","user":{"login":"randubin","id":8919490,"node_id":"MDQ6VXNlcjg5MTk0OTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8919490?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/randubin","html_url":"https:\/\/github.com\/randubin","followers_url":"https:\/\/api.github.com\/users\/randubin\/followers","following_url":"https:\/\/api.github.com\/users\/randubin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/randubin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/randubin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/randubin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/randubin\/orgs","repos_url":"https:\/\/api.github.com\/users\/randubin\/repos","events_url":"https:\/\/api.github.com\/users\/randubin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/randubin\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-11-10T19:35:40Z","updated_at":"2021-11-24T16:59:19Z","closed_at":"2020-11-19T17:35:38Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi All\r\nI am trying to load a custom dataset and I am trying to load a single file to make sure the file is loading correctly:\r\ndataset = load_dataset('csv', data_files=files)\r\nWhen I run it I get:\r\n\r\nDownloading and preparing dataset csv\/default-35575a1051604c88 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) tocache\/huggingface\/datasets\/csv\/default-35575a1051604c88\/0.0.0\/49187751790fa4d820300fd4d0707896e5b941f1a9c644652645b866716a4ac4...\r\n\r\nI am getting this error:\r\n6a4ac4\/csv.py in _generate_tables(self, files)\r\n 78 def _generate_tables(self, files):\r\n 79 for i, file in enumerate(files):\r\n---> 80 pa_table = pac.read_csv(\r\n 81 file,\r\n 82 read_options=self.config.pa_read_options,\r\n\r\n~\/anaconda2\/envs\/nlp\/lib\/python3.8\/site-packages\/pyarrow\/_csv.pyx in pyarrow._csv.read_csv()\r\n\r\n~\/anaconda2\/envs\/nlp\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in 
pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/anaconda2\/envs\/nlp\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\n**ArrowInvalid: straddling object straddles two block boundaries (try to increase block size?)**\r\n\r\n\r\n\r\nThe size of the file is 3.5 GB. When I try smaller files I do not have an issue. When I load it with 'text' parser I can see all data but it is not what I need.\r\nThere is no issue reading the file with pandas. any idea what could be the issue?\r\nWhen I am running a different CSV I do not get this line:\r\n (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size)\r\n\r\nAny ideas?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/836\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/836\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/835","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/835\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/835\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/835\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/835","id":740102210,"node_id":"MDU6SXNzdWU3NDAxMDIyMTA=","number":835,"title":"Wikipedia postprocessing","user":{"login":"bminixhofer","id":13353204,"node_id":"MDQ6VXNlcjEzMzUzMjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13353204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bminixhofer","html_url":"https:\/\/github.com\/bminixhofer","followers_url":"https:\/\/api.github.com\/users\/bminixhofer\/followers","following_url":"https:\/\/api.github.com\/users\/bminixhofer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bminixhofer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bminixhofer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bminixhofer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bminixhofer\/orgs","repos_url":"https:\/\/api.github.com\/users\/bminixhofer\/repos","events_url":"https:\/\/api.github.com\/users\/bminixhofer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bminixhofer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-10T17:26:38Z","updated_at":"2020-11-10T18:23:20Z","closed_at":"2020-11-10T17:49:21Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, thanks for this library!\r\n\r\nRunning this code:\r\n\r\n```py\r\nimport datasets\r\nwikipedia = datasets.load_dataset(\"wikipedia\", \"20200501.de\")\r\nprint(wikipedia['train']['text'][0])\r\n```\r\n\r\nI get:\r\n\r\n```\r\nmini|Ricardo Flores Mag\u00f3n\r\nmini|Mexikanische Revolution\u00e4re, Mag\u00f3n in der Mitte anf\u00fchrend, gegen die Diktatur von Porfirio Diaz, Ausschnitt des Gem\u00e4lde \u201eTierra y Libertad\u201c von Idelfonso Carrara (?) von 1930.\r\n\r\nRicardo Flores Mag\u00f3n (* 16. 
September 1874 in San Antonio Eloxochitl\u00e1n im mexikanischen Bundesstaat Oaxaca; \u2020 22. November 1922 im Bundesgef\u00e4ngnis Leavenworth im US-amerikanischen Bundesstaat Kansas) war als Journalist, Gewerkschafter und Literat ein f\u00fchrender anarchistischer Theoretiker und Aktivist, der die revolution\u00e4re mexikanische Bewegung radikal beeinflusste. Mag\u00f3n war Gr\u00fcnder der Partido Liberal Mexicano und Mitglied der Industrial Workers of the World.\r\n\r\nPolitische Biografie \r\nJournalistisch und politisch k\u00e4mpfte er und sein Bruder sehr kompromisslos gegen die Diktatur Porfirio Diaz. Philosophisch und politisch orientiert an radikal anarchistischen Idealen und den Erfahrungen seiner indigenen Vorfahren bei der gemeinschaftlichen Bewirtschaftung des Gemeindelandes, machte er die Forderung \u201eLand und Freiheit\u201c (Tierra y Libertad) popul\u00e4r. Besonders Francisco Villa und Emiliano Zapata griffen die Forderung Land und Freiheit auf. Seine Philosophie hatte gro\u00dfen Einfluss auf die Landarbeiter. 1904 floh er in die USA und gr\u00fcndete 1906 die Partido Liberal Mexicano. Im Exil lernte er u. a. Emma Goldman kennen. Er verbrachte die meiste Zeit seines Lebens in Gef\u00e4ngnissen und im Exil und wurde 1918 in den USA wegen \u201eBehinderung der Kriegsanstrengungen\u201c zu zwanzig Jahren Gef\u00e4ngnis verurteilt. Zu seinem Tod gibt es drei verschiedene Theorien. Offiziell starb er an Herzversagen. Librado Rivera, der die Leiche mit eigenen Augen gesehen hat, geht davon aus, dass Mag\u00f3n von einem Mitgefangenen erdrosselt wurde. Die staatstreue Gewerkschaftszeitung CROM ver\u00f6ffentlichte 1923 einen Beitrag, nachdem Mag\u00f3n von einem Gef\u00e4ngnisw\u00e4rter erschlagen wurde.\r\nmini|Die Br\u00fcder Ricardo (links) und Enrique Flores Mag\u00f3n (rechts) vor dem Los Angeles County Jail, 1917\r\n\r\n[...]\r\n```\r\n\r\nso some Markup like `mini|` is still left. 
Should I run another parser on this text before feeding it to an ML model or is this a known imperfection of parsing Wiki markup?\r\n\r\nApologies if this has been asked before.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/835\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/835\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/834","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/834\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/834\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/834\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/834","id":740082890,"node_id":"MDU6SXNzdWU3NDAwODI4OTA=","number":834,"title":"[GEM] add WikiLingua cross-lingual abstractive summarization dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-10T17:00:43Z","updated_at":"2021-04-15T12:04:09Z","closed_at":"2021-04-15T12:01:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** WikiLingua\r\n- **Description:** The dataset includes ~770k article and summary pairs in 18 languages from WikiHow. The gold-standard article-summary alignments across languages were extracted by aligning the images that are used to describe each how-to step in an article.\r\n- **Paper:** https:\/\/arxiv.org\/pdf\/2010.03093.pdf\r\n- **Data:** https:\/\/github.com\/esdurmus\/Wikilingua\r\n- **Motivation:** Included in the GEM shared task. 
Multilingual.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/834\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/834\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/833","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/833\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/833\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/833\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/833","id":740079692,"node_id":"MDU6SXNzdWU3NDAwNzk2OTI=","number":833,"title":"[GEM] add ASSET text simplification dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-10T16:56:30Z","updated_at":"2020-12-03T13:38:15Z","closed_at":"2020-12-03T13:38:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** ASSET\r\n- **Description:** ASSET is a crowdsourced\r\nmulti-reference corpus for assessing sentence simplification in English where each simplification was produced by executing several rewriting transformations.\r\n- **Paper:** https:\/\/www.aclweb.org\/anthology\/2020.acl-main.424.pdf\r\n- **Data:** https:\/\/github.com\/facebookresearch\/asset\r\n- **Motivation:** Included in the GEM shared task\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/833\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/833\/timeline","performed_via_github_app":null} 
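The GEM dataset requests above and below all point to the same instructions for adding a new dataset. As a rough illustration of what those instructions ask contributors to write (the class name, download URL and feature names below are placeholders, not taken from any of these datasets), a minimal loading script built on `datasets.GeneratorBasedBuilder` could look like this sketch:

```py
# Minimal sketch of a dataset loading script; placeholder names and URL, not a real dataset's script.
import csv

import datasets


class PlaceholderDataset(datasets.GeneratorBasedBuilder):
    """Hypothetical source/target text dataset."""

    VERSION = datasets.Version("1.0.0")

    def _info(self):
        # Declares the schema that _generate_examples must follow.
        return datasets.DatasetInfo(
            description="Placeholder source/target pairs.",
            features=datasets.Features(
                {"source": datasets.Value("string"), "target": datasets.Value("string")}
            ),
        )

    def _split_generators(self, dl_manager):
        # dl_manager downloads and caches the raw file (placeholder URL).
        path = dl_manager.download("https://example.com/train.csv")
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"filepath": path}
            )
        ]

    def _generate_examples(self, filepath):
        # Yields (key, example) pairs matching the features declared above.
        with open(filepath, encoding="utf-8") as f:
            for idx, row in enumerate(csv.DictReader(f)):
                yield idx, {"source": row["source"], "target": row["target"]}
```

Once such a script is merged, the dataset is loaded by name with `datasets.load_dataset(...)`, exactly like the `load_dataset('wikipedia', '20200501.en')` calls shown in the issues above.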
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/832","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/832\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/832\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/832\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/832","id":740077228,"node_id":"MDU6SXNzdWU3NDAwNzcyMjg=","number":832,"title":"[GEM] add WikiAuto text simplification dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-10T16:53:23Z","updated_at":"2020-12-03T13:38:08Z","closed_at":"2020-12-03T13:38:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** WikiAuto\r\n- **Description:** Sentences in English Wikipedia and their corresponding sentences in Simple English Wikipedia that are written with simpler grammar and word choices. A lot of lexical and syntactic paraphrasing. 
\r\n- **Paper:** https:\/\/www.aclweb.org\/anthology\/2020.acl-main.709.pdf\r\n- **Data:** https:\/\/github.com\/chaojiang06\/wiki-auto\r\n- **Motivation:** Included in the GEM shared task\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/832\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/832\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/831","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/831\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/831\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/831\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/831","id":740071697,"node_id":"MDU6SXNzdWU3NDAwNzE2OTc=","number":831,"title":"[GEM] Add WebNLG dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-10T16:46:48Z","updated_at":"2020-12-03T13:38:01Z","closed_at":"2020-12-03T13:38:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** WebNLG\r\n- **Description:** WebNLG consists of Data\/Text pairs where the data is a set of triples extracted from DBpedia and the text is a verbalisation of these triples (16,095 data inputs and 42,873 data-text pairs). 
The data is available in English and Russian\r\n- **Paper:** https:\/\/www.aclweb.org\/anthology\/P17-1017.pdf\r\n- **Data:** https:\/\/webnlg-challenge.loria.fr\/download\/\r\n- **Motivation:** Included in the GEM shared task, multilingual\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/831\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/831\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/830","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/830\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/830\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/830\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/830","id":740065376,"node_id":"MDU6SXNzdWU3NDAwNjUzNzY=","number":830,"title":"[GEM] add ToTTo Table-to-text dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-10T16:38:34Z","updated_at":"2020-12-10T13:06:02Z","closed_at":"2020-12-10T13:06:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** ToTTo\r\n- **Description:** ToTTo is an open-domain English table-to-text dataset with over 120,000 training examples that proposes a controlled generation task: given a Wikipedia table and a set of highlighted table cells, produce a one-sentence description.\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2004.14373\r\n- **Data:** https:\/\/github.com\/google-research-datasets\/totto\r\n- **Motivation:** Included in the GEM shared task\r\n\r\nInstructions to add a new dataset can be found 
[here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/830\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/830\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/829","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/829\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/829\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/829\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/829","id":740061699,"node_id":"MDU6SXNzdWU3NDAwNjE2OTk=","number":829,"title":"[GEM] add Schema-Guided Dialogue","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-10T16:33:44Z","updated_at":"2020-12-03T13:37:50Z","closed_at":"2020-12-03T13:37:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** The Schema-Guided Dialogue Dataset\r\n- **Description:** The Schema-Guided Dialogue (SGD) dataset consists of over 20k annotated multi-domain, task-oriented conversations between a human and a virtual assistant. 
These conversations involve interactions with services and APIs spanning 20 domains, ranging from banks and events to media, calendar, travel, and weather.\r\n- **Paper:** https:\/\/arxiv.org\/pdf\/2002.01359.pdf https:\/\/arxiv.org\/pdf\/2004.15006.pdf\r\n- **Data:** https:\/\/github.com\/google-research-datasets\/dstc8-schema-guided-dialogue\r\n- **Motivation:** Included in the GEM shared task\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/829\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/829\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/828","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/828\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/828\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/828\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/828","id":740008683,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE4NTcwMjY3","number":828,"title":"Add writer_batch_size attribute to GeneratorBasedBuilder","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-10T15:28:19Z","updated_at":"2020-11-10T16:27:36Z","closed_at":"2020-11-10T16:27:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/828","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/828","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/828.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/828.patch","merged_at":"2020-11-10T16:27:35Z"},"body":"As specified in #741 one would need to specify a custom ArrowWriter batch size to avoid filling the RAM. 
Indeed the defaults buffer size is 10 000 examples but for multimodal datasets that contain images or videos we may want to reduce that.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/828\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/828\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/827","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/827\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/827\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/827\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/827","id":739983024,"node_id":"MDU6SXNzdWU3Mzk5ODMwMjQ=","number":827,"title":"[GEM] MultiWOZ dialogue dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-10T14:57:50Z","updated_at":"2020-12-12T13:42:30Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** MultiWOZ (Multi-Domain Wizard-of-Oz)\r\n- **Description:** 10k annotated human-human dialogues. Each dialogue consists of a goal, multiple user and system utterances as well as a belief state. 
Only system utterances are annotated with dialogue acts \u2013 there are no annotations from the user side.\r\n- **Paper:** https:\/\/arxiv.org\/pdf\/2007.12720.pdf\r\n- **Data:** https:\/\/github.com\/budzianowski\/multiwoz\r\n- **Motivation:** Will likely be part of the GEM shared task\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/827\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/827\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/826","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/826\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/826\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/826\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/826","id":739976716,"node_id":"MDU6SXNzdWU3Mzk5NzY3MTY=","number":826,"title":"[GEM] Add E2E dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-10T14:50:40Z","updated_at":"2020-12-03T13:37:57Z","closed_at":"2020-12-03T13:37:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** E2E NLG dataset (for End-to-end natural language generation)\r\n- **Description:** A dataset for training end-to-end, data-driven natural language generation systems in the restaurant domain; the dataset consists of 5,751 dialogue-act Meaning Representations (structured data) and 8.1 reference free-text utterances per dialogue-act on average\r\n- **Paper:** https:\/\/arxiv.org\/pdf\/1706.09254.pdf https:\/\/arxiv.org\/abs\/1901.07931\r\n- **Data:** http:\/\/www.macs.hw.ac.uk\/InteractionLab\/E2E\/#data\r\n- **Motivation:** This dataset will likely be included in the GEM shared task\r\n\r\nInstructions to add a new dataset can be 
found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/826\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/826\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/825","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/825\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/825\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/825\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/825","id":739925960,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE4NTAyNjgx","number":825,"title":"Add accuracy, precision, recall and F1 metrics","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-10T13:50:35Z","updated_at":"2020-11-11T19:23:48Z","closed_at":"2020-11-11T19:23:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/825","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/825","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/825.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/825.patch","merged_at":"2020-11-11T19:23:43Z"},"body":"This PR adds several single metrics, namely:\r\n\r\n- Accuracy\r\n- Precision\r\n- Recall\r\n- F1\r\n\r\nThey all use the sklearn metrics of the same name under the hood. They offer several useful features when training a multilabel\/multiclass model:\r\n- have a macro\/micro\/per label\/weighted\/binary\/per sample score\r\n- score only the selected labels (usually what we call the positive labels) and ignore the negative ones. 
For example in case of a Named Entity Recognition task, positive labels are (`PERSON`, `LOCATION` or `ORGANIZATION`) and the negative one is `O`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/825\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/825\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/824","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/824\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/824\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/824\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/824","id":739896526,"node_id":"MDU6SXNzdWU3Mzk4OTY1MjY=","number":824,"title":"Discussion using datasets in offline mode","user":{"login":"mandubian","id":77193,"node_id":"MDQ6VXNlcjc3MTkz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/77193?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mandubian","html_url":"https:\/\/github.com\/mandubian","followers_url":"https:\/\/api.github.com\/users\/mandubian\/followers","following_url":"https:\/\/api.github.com\/users\/mandubian\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mandubian\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mandubian\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mandubian\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mandubian\/orgs","repos_url":"https:\/\/api.github.com\/users\/mandubian\/repos","events_url":"https:\/\/api.github.com\/users\/mandubian\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mandubian\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-11-10T13:10:51Z","updated_at":"2021-01-20T14:05:04Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`datasets.load_dataset(\"csv\", ...)` breaks if you have no connection (There is already this issue https:\/\/github.com\/huggingface\/datasets\/issues\/761 about it). 
It seems to be the same for metrics too.\r\n\r\nI'm creating this ticket to discuss this a bit and gather what you have in mind or other propositions.\r\n\r\nHere are some points to open the discussion:\r\n- if you want to prepare your code\/datasets on your machine (having an internet connection) but run it on another offline machine (not having an internet connection), it won't work as is, even if you have all files locally on this machine.\r\n- AFAIK, you can make it work if you manually put the python files (csv.py for example) on this offline machine and change your code to `datasets.load_dataset(\"MY_PATH\/csv.py\", ...)`. But it would be much better if you could run the same code without modification if the files are available locally.\r\n- I've also been considering the requirement of downloading Python code and executing it on your machine to use datasets. This can be an issue in a professional context. Downloading a CSV\/H5 file is acceptable, but downloading an executable script can open many security issues. We certainly need a mechanism to at least \"freeze\" the dataset code you retrieved once, so that you can review it if you want and then be sure you use this one everywhere and not a version downloaded from the internet.\r\n \r\nWDYT? (thanks)\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/824\/reactions","total_count":7,"+1":7,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/824\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/823","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/823\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/823\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/823\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/823","id":739815763,"node_id":"MDU6SXNzdWU3Mzk4MTU3NjM=","number":823,"title":"How batch processing works in datasets ","user":{"login":"rabeehkarimimahabadi","id":73364383,"node_id":"MDQ6VXNlcjczMzY0Mzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73364383?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi","html_url":"https:\/\/github.com\/rabeehkarimimahabadi","followers_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to 
add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-10T11:11:17Z","updated_at":"2020-11-10T13:11:10Z","closed_at":"2020-11-10T13:11:09Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI need to process my datasets before they are passed to the dataloader in batches.\r\nHere is my code:\r\n\r\n```python\r\nclass AbstractTask(ABC):\r\n    task_name: str = NotImplemented\r\n    preprocessor: Callable = NotImplemented\r\n    split_to_data_split: Mapping[str, str] = NotImplemented\r\n    tokenizer: Callable = NotImplemented\r\n    max_source_length: str = NotImplemented\r\n    max_target_length: str = NotImplemented\r\n    # TODO: should not be a task item, but cannot see other ways.\r\n    tpu_num_cores: int = None\r\n\r\n    # The arguments set are for all tasks and need to be kept common.\r\n    def __init__(self, config):\r\n        self.max_source_length = config['max_source_length']\r\n        self.max_target_length = config['max_target_length']\r\n        self.tokenizer = config['tokenizer']\r\n        self.tpu_num_cores = config['tpu_num_cores']\r\n\r\n    def _encode(self, batch) -> Dict[str, torch.Tensor]:\r\n        batch_encoding = self.tokenizer.prepare_seq2seq_batch(\r\n            [x[\"src_texts\"] for x in batch],\r\n            tgt_texts=[x[\"tgt_texts\"] for x in batch],\r\n            max_length=self.max_source_length,\r\n            max_target_length=self.max_target_length,\r\n            padding=\"max_length\" if self.tpu_num_cores is not None else \"longest\",  # TPU hack\r\n            return_tensors=\"pt\"\r\n        )\r\n        return batch_encoding.data\r\n\r\n    def data_split(self, split):\r\n        return self.split_to_data_split[split]\r\n\r\n    def get_dataset(self, split, n_obs=None):\r\n        split = self.data_split(split)\r\n        if n_obs is not None:\r\n            split = split + \"[:{}]\".format(n_obs)\r\n        dataset = load_dataset(self.task_name, split=split)\r\n        dataset = dataset.map(self.preprocessor, remove_columns=dataset.column_names)\r\n        dataset = dataset.map(lambda batch: self._encode(batch), batched=True)\r\n        dataset.set_format(type=\"torch\", columns=['input_ids', 'token_type_ids', 'attention_mask', 'label'])\r\n        return dataset\r\n```\r\n\r\nI call it like\r\n\r\n`AutoTask.get(task, train_dataset_config).get_dataset(split=\"train\", n_obs=data_args.n_train)`\r\n\r\nThis gives the following error. It seems to me that the data inside `dataset.map(lambda batch: self._encode(batch), batched=True)` is not processed in batches. Could you tell me how I can process the dataset in batches inside my function? 
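(For reference, a minimal illustrative sketch, not taken from this thread: with `batched=True`, the function passed to `map` receives a dict mapping column names to lists of values, not a list of example dicts, so an expression like `[x[\"src_texts\"] for x in batch]` ends up iterating over column names. The column names below are assumed to match the ones used above.)\r\n\r\n```python\r\nfrom datasets import Dataset\r\n\r\n# toy dataset with the two text columns assumed above\r\nds = Dataset.from_dict({\"src_texts\": [\"a\", \"b\", \"c\"], \"tgt_texts\": [\"x\", \"y\", \"z\"]})\r\n\r\ndef encode(batch):\r\n    # with batched=True, `batch` is a dict of lists: {\"src_texts\": [...], \"tgt_texts\": [...]}\r\n    src = batch[\"src_texts\"]  # a list of strings, not a list of dicts\r\n    tgt = batch[\"tgt_texts\"]\r\n    # return a dict of lists, one entry per example in the batch\r\n    return {\"n_src_chars\": [len(s) for s in src], \"n_tgt_chars\": [len(t) for t in tgt]}\r\n\r\nds = ds.map(encode, batched=True)\r\n```\r\n\r\n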
thanks \r\n\r\n File \"finetune_multitask_trainer.py\", line 192, in main\r\n if training_args.do_train else None\r\n File \"finetune_multitask_trainer.py\", line 191, in \r\n split=\"train\", n_obs=data_args.n_train) for task in data_args.task}\r\n File \"\/remote\/idiap.svm\/user.active\/rkarimi\/dev\/internship\/seq2seq\/tasks.py\", line 56, in get_dataset\r\n dataset = dataset.map(lambda batch: self._encode(batch), batched=True)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1236, in map\r\n update_data = does_function_return_dict(test_inputs, test_indices)\r\n File \"\/idiap\/user\/rkarimi\/libs\/anaconda3\/envs\/internship\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1207, in does_function_return_dict\r\n function(*fn_args, indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n File \"\/remote\/idiap.svm\/user.active\/rkarimi\/dev\/internship\/seq2seq\/tasks.py\", line 56, in \r\n dataset = dataset.map(lambda batch: self._encode(batch), batched=True)\r\n File \"\/remote\/idiap.svm\/user.active\/rkarimi\/dev\/internship\/seq2seq\/tasks.py\", line 37, in _encode\r\n [x[\"src_texts\"] for x in batch],\r\n File \"\/remote\/idiap.svm\/user.active\/rkarimi\/dev\/internship\/seq2seq\/tasks.py\", line 37, in \r\n [x[\"src_texts\"] for x in batch],\r\nTypeError: string indices must be integers\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/823\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/823\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/822","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/822\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/822\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/822\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/822","id":739579314,"node_id":"MDU6SXNzdWU3Mzk1NzkzMTQ=","number":822,"title":"datasets freezes 
","user":{"login":"rabeehkarimimahabadi","id":73364383,"node_id":"MDQ6VXNlcjczMzY0Mzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73364383?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi","html_url":"https:\/\/github.com\/rabeehkarimimahabadi","followers_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-10T05:10:19Z","updated_at":"2020-11-12T23:23:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I want to load these two datasets and convert them to Dataset format in torch and the code freezes for me, could you have a look please? 
thanks \r\n\r\ndataset1 = load_dataset(\"squad\", split=\"train[:10]\")\r\ndataset1 = dataset1.set_format(type='torch', columns=['context', 'answers', 'question'])\r\n\r\ndataset2 = load_dataset(\"imdb\", split=\"train[:10]\")\r\ndataset2 = dataset2.set_format(type=\"torch\", columns=[\"text\", \"label\"])\r\nprint(len(dataset1))\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/822\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/822\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/821","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/821\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/821\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/821\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/821","id":739506859,"node_id":"MDU6SXNzdWU3Mzk1MDY4NTk=","number":821,"title":"`kor_nli` dataset doesn't being loaded properly","user":{"login":"sackoh","id":30492059,"node_id":"MDQ6VXNlcjMwNDkyMDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30492059?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sackoh","html_url":"https:\/\/github.com\/sackoh","followers_url":"https:\/\/api.github.com\/users\/sackoh\/followers","following_url":"https:\/\/api.github.com\/users\/sackoh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sackoh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sackoh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sackoh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sackoh\/orgs","repos_url":"https:\/\/api.github.com\/users\/sackoh\/repos","events_url":"https:\/\/api.github.com\/users\/sackoh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sackoh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-10T02:04:12Z","updated_at":"2020-11-16T13:59:12Z","closed_at":"2020-11-16T13:59:12Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"There are two issues from `kor_nli` dataset\r\n\r\n1. 
csv.DictReader failed to split features by tab\r\n - Should not exist `None` value in label feature, but there it is.\r\n ```python\r\n kor_nli_train['train'].unique('gold_label')\r\n # ['neutral', 'entailment', 'contradiction', None]\r\n ```\r\n - I found a reason why there is `None` values in label feature as following code\r\n ```python\r\n from datasets import load_dataset\r\n kor_nli_train = load_dataset('kor_nli', 'multi_nli')\r\n \r\n for idx, example in enumerate(kor_nli_train['train']):\r\n if example['gold_label'] is None:\r\n print(idx, example)\r\n break\r\n # 16835 {'gold_label': None, 'sentence1': '\uadf8\ub294 \uc804\uc7c1 \uc804\uc5d0 \uac00\ubcbc\uc6b4 \ubc85\uc2a4\ud0a8 \uc554\ub9d0\uc744 \uac00\uc9c0\uace0 \ub2ec\ub9ac\uae30 \uc704\ud574 \uc6b0\uc720\ucc98\ub7fc \ud558\uc580 \uc2a4\ud130\ub4dc\ub97c \ub123\uc5c8\ub2e4.\\t\uc804\uc7c1 \uc804\uc5d0 \ub2e4\uc778\uc885 \uc5ec\uc131\ub4e4\uacfc \ud568\uaed8 \uc788\ub294 \ubc31\uc778 \ub0a8\uc790\uac00 \uc788\uc5c8\ub2e4.\\tentailment\\n\uc2ac\ub9bc\uc740 \uc7ac\ube68\ub9ac \uc637\uc744 \uc785\uc5c8\uace0, \uc21c\uac04\uc801\uc73c\ub85c \ubbf8\uc9c0\uadfc\ud55c \ubb3c\uc744 \ubfcc\ub9b4 \uc218 \uc788\ub294 \uc544\uce68 \uc138\ud0c1\ubb3c\uc744 \uae30\uaebc\uc774 \uac00\ub450\uc5c8\ub2e4.\\t\uc2ac\ub9bc\uc740 \uc9c1\uc7a5\uc5d0 \ub2a6\uc5c8\ub2e4.\\tneutral\\n\ub274\uc695\uc5d0\uc11c \uadf8 \uc2dd\uc0ac\ub97c \ud574\ubd24\ub294\ub370, \uac70\uae30\uc11c \uc18c\uace0\uae30\uc758 \uba4b\uc9c4 \uc18c\uace0\uae30 \ubd80\ubd84\uc744 \uc694\ub9ac\ud558\uace0 \ubc14\ubca0\ud050\ub85c \ub9cc\ub4e0 \ub110\ube64\uc9c0 \uac19\uc740 \uac78 \uac00\uc838\uc654\ub294\ub370, \uc815\ub9d0 \ub300\ub2e8\ud574.\\t\uadf8\ub4e4\uc774 \uac70\uae30\uc11c \uc694\ub9ac\ud558\ub294 \uc1e0\uace0\uae30\ub294 \uc5ed\uacb9\ub2e4. \uac70\uae30\uc11c \uc808\ub300 \uba39\uc9c0 \ub9c8\ub77c.\\tcontradiction\\n\ud310\ub9e4\uc6d0\uc758 \uc8fd\uc74c\uc5d0\uc11c \ube0c\ub77c\uc774\uc5b8 \ub370\ub124\ud788... \ud06c\ub9ac\uc2a4 \ucf08\ub9ac\\t\ud06c\ub9ac\uc2a4 \ucf08\ub9ac\ub294 \uc138\uc77c\uc988\ub9e8\uc758 \uc8fd\uc74c\uc744 \uc5b8\uae09\ud558\uc9c0 \uc54a\ub294\ub2e4.\\tcontradiction\\n\uadf8\ub7ec\ub294 \ub3d9\uc548 \uc694\ub9ac\uc0ac\ub294 \uadf8\ub0e5 \ud654\uac00 \ub0ac\uc5b4.\\t\uc2a4\ud29c\uac00 \ub053\ub294 \ub3d9\uc548 \uc694\ub9ac\uc0ac\ub294 \ud654\uac00 \ub0ac\ub2e4.\\tneutral\\n\ub9c8\uc9c0\ub9c9 \ub85c\ub9c8\uc758 \ub9f9\uacf5\uaca9 \uc804\ub0a0 \ubc24, 900\uba85 \uc774\uc0c1\uc758 \uc720\ub300\uc778 \uc218\ube44\uc218\ub4e4\uc774 \ub85c\ub9c8\uc778\ub4e4\uc5d0\uac8c \uadf8\ub4e4\uc744 \uc0ac\ub85c\uc7a1\ub294 \uc2b9\ub9ac\ub97c \uc8fc\uae30 \ubcf4\ub2e4\ub294 \ub300\ub7c9 \uc790\uc0b4\uc744 \uc800\uc9c8\ub800\ub2e4.\\t\ub85c\ub9c8\uc778\ub4e4\uc774 \uadf8\ub4e4\uc758 \ud3ec\ud68d\uc5d0 \uc2b9\ub9ac\ud558\ub3c4\ub85d \ub0b4\ubc84\ub824\ub450\uae30 \ubcf4\ub2e4\ub294 900\uba85\uc758 \uc720\ub300\uc778 \uc218\ube44\uc218\ub4e4\uc774 \uc790\uc0b4\ud588\ub2e4.\\tentailment\\n\uc55e\uc73c\ub85c \ubc1c\uc0ac\ud558\ub77c.\\t\ubc1c\uc0ac.\\tneutral\\n\uadf8\ub9ac\uace0 \ub2f9\uc2e0\uc740 \uc6b0\ub9ac \ub545\uc774 \uc5d0\uc774\ucee4\uc5d0 \uc788\ub2e4\ub294 \uac83\uc744 \uc54c\uace0 \uc788\ub2e4. 
\uc6b0\ub9ac \uc0ac\ub78c\ub4e4\uc740 \uc5b4\ub5a4 \uac83\uc774 \uc5bc\ub9c8\ub098 \ub9ce\uc740\uc9c0 \uc774\ud574\ud558\uc9c0 \ubabb\ud560 \uac83\uc774\ub2e4.\\t\ubaa8\ub4e0 \uc0ac\ub78c\ub4e4\uc740 \uc6b0\ub9ac\uc758 \uce21\uc815 \uc2dc\uc2a4\ud15c\uc774 \uc5b4\ub5bb\uac8c \uc791\ub3d9\ud558\ub294\uc9c0 \uc54c\uace0 \uc774\ud574\ud569\ub2c8\ub2e4.\\tcontradiction\\n\uc8fc\ubbf8\uac8c\uc2a4\\tJumiyges\ub294 \ub3c4\uc2dc\uc758 \uc774\ub984\uc774\ub2e4.\\tneutral\\n\uc0ac\ub78c\uc740 \uc790\uae30 \ubbfc\uc871\uc744 \ub3cc\ubd10\uc57c \ud55c\ub2e4...\\t\uc0ac\ub78c\uc740 \uc870\uad6d\uc5d0 \uacf5\uac10\ud574\uc57c \ud55c\ub2e4.\\tentailment\\n\ub610\ud55c PDD 63\uc740 \uc815\ubd80\uc640 \uc5c5\uacc4\uac00 \ucef4\ud4e8\ud130 \uae30\ubc18 \uacf5\uaca9\uc5d0 \ub300\ud574 \uacbd\uace0\ud558\uace0 \ubc29\uc5b4\ud560 \uc900\ube44\ub97c \ub354 \uc798\ud560 \uc218 \uc788\ub3c4\ub85d \uc2dc\uc2a4\ud15c \ucde8\uc57d\uc131, \uc704\ud611, \uce68\uc785 \ubc0f \uc774\uc0c1\uc5d0 \ub300\ud55c \uc815\ubcf4\ub97c \uacf5\uc720\ud558\ub294 \uba54\ucee4\ub2c8\uc998\uc744 \uc218\ub9bd\ud558\ub294 \uac83\uc774 \uc911\uc694\ud558\ub2e4\ub294 \uac83\uc744 \uc778\uc2dd\ud588\uc2b5\ub2c8\ub2e4.\\t\uc815\ubcf4 \uc804\uc1a1 \ud504\ub85c\ud1a0\ucf5c\uc744 \ub9cc\ub4dc\ub294 \uac83\uc740 \uc911\uc694\ud558\ub2e4.\\tentailment\\n\uce74\ud398 \ub9c1 \ud53c\uc544\uc790 \ub378\ub77c \ub808\ud4cc\ube14\ub9ac\uce74 \ubc14\ub85c \ub0a8\ucabd\uc5d0\ub294 \ud53c\ub80c\uccb4\uac00 \uc54c\ub824\uc9c4 \uc9da \uc81c\ud488 \ub54c\ubb38\uc5d0 \ud55c\ub54c \uc2a4\ud2b8\ub85c \ub9c8\ucf13\uc774\ub77c\uace0 \ubd88\ub838\ub358 16\uc138\uae30 \ub85c\uc9c0\uc544\uc778 \uba54\ub974\uce74\ud1a0 \ub204\uc624\ubcf4(Mercato Nuovo)\uac00 \uc788\ub2e4.\\t\ud53c\uc544\uc790 \ub378\ub77c \ub808\ud4cc\ube14\ub9ac\uce74\uc5d0\ub294 \uce74\ud398\uac00 \ub9ce\uc774 \uc788\ub2e4.\\tentailment\\n\uc6b0\ub9ac\uac00 \uc5ec\uae30 \uc788\ub294 \ud55c \ud2b8\ub9b0\ud310\uc774 \ubb58 \uc8fc\uc6e0\ub294\uc9c0 \uc0b4\ud3b4\ubd10\uc57c\uaca0\uc5b4\\t\uc6b0\ub9ac\ub294 \ud2b8\ub9b0\ud310\uc774 \ubb34\uc5c7\uc744 \uc8fc\uc6e0\ub294\uc9c0 \ubcf4\ub294 \ub370 \uc2dc\uac04\uc744 \ub0ad\ube44\ud558\uc9c0 \uc54a\uc744 \uac83\uc774\ub2e4.\\tcontradiction\\n\uadf8\ub7ec\ub098 \ucf08\ud2b8\uc871\uc758 \ubb38\ud654\uc801 \uae30\ubc18\uc744 \uac00\uc9c4 \uc544\uc77c\ub79c\ub4dc \uad50\ud68c\ub294 \uc720\ub7fd\uc758 \uc2e0\ud765 \uae30\ub3c5\uad50 \uc138\uacc4\uc640\ub294 \ub2e4\ub974\uac8c \ubc1c\uc804\ud588\uace0 \uacb0\uad6d \ub85c\ub9c8\uc640 \uc911\uc559\uc9d1\uad8c\uc801 \ud589\uc815\uc73c\ub85c \ub300\uccb4\ub418\uc5c8\ub2e4.\\t\uc544\uc77c\ub79c\ub4dc \uad50\ud68c\uc5d0\ub294 \ucf08\ud2b8\uc871\uc758 \uae30\uc9c0\uac00 \uc788\uc5c8\ub2e4.\\tentailment\\n\uae00\uc384, \ub10c \uc120\ud0dd\uc758 \uc5ec\uc9c0\uac00 \uc5c6\uc5b4\\t\uae00\uc384, \ub108\uc5d0\uac90 \ub9ce\uc740 \uc120\ud0dd\uad8c\uc774 \uc788\uc5b4.\\tcontradiction\\n\uc0ac\uc2e4, \uacf5\uc2dd\uc801\uc778 \ubcf4\uc7a5\uc740 \uc5c6\ub2e4.\\t\ub0b4\uac00 \uc0b0 \ubb3c\uac74\uc5d0 \ub300\ud55c \ubcf4\uc99d\uc774 \uc5c6\uc5c8\ub2e4.\\tneutral\\n\ub35c \ud65c\uae30\ucc28\uae34 \ud558\uc9c0\ub9cc, \uc548\uc2dc\uc640 \ub974 \ubd80\ub974\uc82f\uc758 \uc0ac\ub791\uc2a4\ub7ec\uc6b4 \ud638\uc218\uc5d0\uc11c\ub3c4 \uc0b6\uc740 \ub611\uac19\uc774 \uc0c1\ucf8c\ud558\ub2e4.\\t\uc548\uc2dc\uc640 \ub974 \ubd80\ub974\uac9f\uc5d0\uc11c\ub294 \ud638\uc218\uc5d0\uc11c\uc758 \ud65c\ub3d9\uc774 \uc11c\ub450\ub974\uace0 \ubc14\uc05c \ubd84\uc704\uae30\ub97c \uc5f0\ucd9c\ud55c\ub2e4.\\tcontradiction\\n\uadf8\uc758 \uc5ec\ud589 
\uc18c\uc2dd\uc774 \uc774\ubbf8 \ud37c\uc84c\ub2e4\uba74 \uacf5\uaca9 \uc18c\uc2dd\ub3c4 \ud37c\uc84c\uc744 \ud14c\uc9c0\ub9cc \ub9c8\uc744\uc5d0\uc11c\ub294 \uc804\ud600 \uacf5\ud669\uc758 \uae30\ubbf8\uac00 \ubcf4\uc774\uc9c0 \uc54a\uc558\ub2e4.\\t\uadf8\ub294 \uc65c \ub9c8\uc744\uc774 \ub2f9\ud669\ud558\uc9c0 \uc54a\uc558\ub294\uc9c0 \uc54c \uc218 \uc5c6\uc5c8\ub2e4.\\tneutral\\n\uacfc\uac70\uc5d0\ub294 \uc8fd\uc74c\uc758 \uc704\ud611\uc774 \ud1a0\uc9c0\uc758 \ud310\ub9e4\ub97c \ub9c9\ub294 \ub370 \uac70\uc758 \ub3c4\uc6c0\uc774 \ub418\uc9c0 \uc54a\uc558\ub2e4.\\t\ud1a0\uc9c0 \ud310\ub9e4\ub294 \uc5b4\ub5a0\ud55c \uc704\ud611\ub3c4 \uad50\ud658\ud558\uc9c0 \uc54a\uace0 \uc774\ub8e8\uc5b4\uc9c4\ub2e4.\\tcontradiction\\n\uc5b4\ub290 \uc2dc\uc810\uc5d0 \uc774\ub974\ub7ec \ub098\ub294 \uc9c0\uae08 \ub2e4\uac00\uc624\ub294 \uc0c8\ub85c\uc6b4 \uac83\ub4e4\uacfc \ub098\uc624\ub294 \ub9ce\uc740 \uc0c8\ub85c\uc6b4 \uac83\ub4e4\uc774 \ub0b4\uac00 \ub299\uc5b4\uac00\uace0 \uc788\ub2e4\uace0 \ub9d0\ud558\ub294 \uc2dc\ub300\ub85c \uc811\uc5b4\ub4e4\uace0 \uc788\ub2e4.\\t\ub098\ub294 \uc5ec\uc804\ud788 \ub0b4\uac00 \ubcf4\ub294 \ubaa8\ub4e0 \uc0c8\ub85c\uc6b4 \uac83\uc744 \uc0ac\ub791\ud55c\ub2e4.\\tcontradiction\\n\ub274\uc2a4\uc704\ud06c\ub294 \ubb3c\ub9ac\ud559\uc790\ub4e4\uc774 \uacbd\uae30\uc7a5 \ud589\uc0ac\uc5d0\uc11c \uace0\uc18d\ub3c4\ub85c\uc758 \uc790\ub3d9\ucc28 \uad50\ud1b5\uacfc \ubcf4\ud589\uc790 \uad50\ud1b5\uc744 \uac1c\uc120\ud558\uae30 \uc704\ud574 \uc0c8\ub5bc\uc758 \uc6c0\uc9c1\uc784\uc744 \uc5f0\uad6c\ud558\uace0 \uc788\ub2e4\uace0 \ub9d0\ud55c\ub2e4.\\t\uace0\uc18d\ub3c4\ub85c\uc758 \uc790\ub3d9\ucc28 \uad50\ud1b5 \ud750\ub984\uc744 \uac1c\uc120\ud558\ub294 \uac83\uc740 \ubb3c\ub9ac\ud559\uc790\ub4e4\uc774 \uc0c8\ub5bc\ub97c \uc5f0\uad6c\ud558\ub294 \uc774\uc720 \uc911 \ud558\ub098\uc774\ub2e4.\\tentailment\\n\uc5bc\ub9c8\ub098 \ub2e4\ub978\uac00? 
\uadf8\ub294 \uc7a0\uc2dc \ub9d0\uc744 \uba48\ucd94\uc5c8\ub2e4\uac00 \ub9d0\uc744 \uc774\uc5c8\ub2e4.\\t\uadf8\ub294 \uadf8 \uc18c\ub140\uac00 \uc5b4\ub514\uc5d0 \uc788\ub294\uc9c0 \uc54c\uace0 \uc2f6\uc5c8\ub2e4.\\tentailment\\n\uae00\uc384, \uadf8\uc5d0\uac8c \ub108\ubb34 \ub9ce\uc740 \uac83\uc744 \uc8fc\uc9c0\ub9c8.\\t\uadf8\ub294 \ud6e8\uc52c \ub354 \ub9ce\uc740 \uac83\uc744 \uc694\uad6c\ud560 \uac83\uc774\ub2e4.\\tneutral\\n\uc544\ubb34\ub9ac \uadf8\uc758 \ucc3d\uc791\ubb3c\uc774 \uc644\ubcbd\ud574 \ubcf4\uc778\ub2e4\uace0 \ud574\ub3c4, \uadf8\ub4e4\uc744 \ubbff\ub294 \uac83\uc740 \uc544\ub9c8\ub3c4 \uc88b\uc740 \uc0dd\uac01\uc774 \uc544\ub2d0 \uac83\uc774\ub2e4.\\'\\t\ub3c4\uc790\uae30\ub97c \uc798 \ub9cc\ub4e0\ub2e4\uace0 \ud574\uc11c \ub204\uad70\uac00\ub97c \ubbff\ub294 \uac83\uc740 \uc544\ub9c8 \uc88b\uc9c0 \uc54a\uc744 \uac83\uc774\ub2e4.\\tneutral\\n\ubc84\uc2a4\ud2c0\ub9c1 \uadf8\ub780 \ube44\uc544(Bustling Gran Via)\ub294 \ud638\ud154, \uc0c1\uc810, \uadf9\uc7a5, \ub098\uc774\ud2b8\ud074\ub7fd, \uce74\ud398 \ub4f1\uc774 \uc5b4\uc6b0\ub7ec\uc838 \uc0b0\ucc45\uacfc \ucc3d\uac00\ub97c \ubcfc \uc218 \uc788\ub2e4.\\tGran Via\ub294 \ud638\ud154, \uc0c1\uc810, \uadf9\uc7a5, \ub098\uc774\ud2b8\ud074\ub7fd, \uce74\ud398\uc758 \ubc88\ud654\ud55c \uc870\ud569\uc774\ub2e4.\\tentailment\\n\uc815\ubd80 \uc778\uc1c4\uc18c\\t\uadf8 \uc0ac\ubb34\uc2e4\uc740 \uc6cc\uc2f1\ud134\uc5d0 \uc704\uce58\ud574 \uc788\ub2e4.\\tneutral\\n\uc2e4\uc81c \ubb38\ud654 \uc804\uc7c1\uc774 \uc5b4\ub514 \uc788\ub294\uc9c0 \uc54c\uace0 \uc2f6\ub2e4\uba74 \ud559\uc6d0\uc744 \uc78a\uc5b4\ubc84\ub9ac\uace0 \uc2e4\ub9ac\ucf58 \ubc38\ub9ac\uc640 \ub808\ub4dc\ubaac\ub4dc\ub97c \uc0dd\uac01\ud574 \ubcf4\ub77c.\\t\uc2e4\uc81c \ubb38\ud654 \uc804\uc7c1\uc740 \ub808\ub4dc\ubaac\ub4dc\uc5d0\uc11c \uc77c\uc5b4\ub09c\ub2e4.\\tentailment\\n\uadf8\ub9ac\uace0 \ud398\ub2c8\uc2e4\ub9b0\uc744 \uc8fc\uc9c0 \uc54a\uae30 \uc704\ud574 \uce68\ub300 \uc704\uc5d0 \uc62c\ub824\ub1a8\uc5b4\\t\uadf8\ub140\uc758 \ubc29\uc5d0\ub294 \ud398\ub2c8\uc2e4\ub9b0\uc774 \uc5c6\ub2e4\ub294 \uc9d5\ud6c4\uac00 \uc804\ud600 \uc5c6\uc5c8\ub2e4.\\tcontradiction\\nL.A.\uc758 \uc57c\uc678 \uc2dc\uc7a5\uc744 \ud65c\ubcf4\ud558\ub294 \uac83\uc740 \ub9db\uc788\uace0 \uc800\ub834\ud55c \uadf8\ub8e8\ube0c\ub97c \uc7a1\uace0, \ub05d\uc774 \uc5c6\ub294 \ud587\ube5b\uc744 \uc990\uae30\uace0, \uc2e0\uc120\ud55c \ub18d\uc0b0\ubb3c, \uaf43, \ud5a5, \uadf8\ub9ac\uace0 \uac00\uc82f \uac08\ub85c\uc5b4\ub97c \uad6c\uc785\ud558\uba74\uc11c \ud604\uc9c0\uc778\ub4e4\uacfc \uc5b4\uc6b8\ub9b4 \uc218 \uc788\ub294 \ud6cc\ub96d\ud55c \ubc29\ubc95\uc774\ub2e4.\\tLA\uc758 \uc57c\uc678 \uc2dc\uc7a5\uc744 \ub3cc\uc544\ub2e4\ub2c8\ub294 \uac83\uc740 \uc2dc\uac04 \ub0ad\ube44\ub2e4.\\tcontradiction\\n\uc548\ub098\ub294 \ubc16\uc73c\ub85c \ub098\uc640 \uc548\ub3c4\uc758 \ud55c\uc228\uc744 \ub0b4\uc26c\uc5c8\ub2e4. 
\ub2e8 \ud55c \ubc88, \uadf8\ub9ac\uace0 \ub9c8\ub9ac\ud6c4\uc544\uc26c \ub9db\uc758 \uc220\ub85c \ub05d\ub0b4\uc790\ub294 \uacb0\uc2ec\uc774 \ub4a4\uc11e\uc5ec \uc788\uc5c8\ub2e4.\\t\uc548\ub098\ub294 \uc548\uc2ec\ud558\uace0 \ub9c8\ub9ac\ud6c4\uc544\uc26c \ub9db\uc758 \uc220\uc744 \ub2e4 \ub9c8\uc2dc\uae30\ub85c \uacb0\uc2ec\ud588\ub2e4.\\tentailment\\n5 \uc6d4\uc5d0 Vajpayee\ub294 \ud575 \uc2e4\ud5d8\uc758 \uc131\uacf5\uc801\uc778 \uc644\ub8cc\ub97c \ubc1c\ud45c\ud588\ub294\ub370, \uc778\ub3c4\uc778\ub4e4\uc740 \uc8fc\uad8c\uc758 \ud45c\uc2dc\ub85c \uc120\uc804\ud588\uc9c0\ub9cc \uc774\uc6c3 \uad6d\uac00\uc640 \uc11c\uad6c\uc640\uc758 \uc778\ub3c4 \uad00\uacc4\ub97c \ubcf5\uc7a1\ud558\uac8c \ub9cc\ub4e4 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\\t\uc778\ub3c4\ub294 \uc131\uacf5\uc801\uc778 \ud575\uc2e4\ud5d8\uc744 \ud55c \uc801\uc774 \uc5c6\ub2e4.\\tcontradiction\\n\ud50c\ub77c\ub178 \uc6d0\uc5d0\uc11c \ubcf4\ud1b5 \uc5bc\ub9c8\ub098 \ub9ce\uc740 \uac83\uc744 \uac00\uc9c0\uace0 \uc788\ub294\uac00?\\t\uc800 \uc0ac\ub78c\ub4e4 \uc911\uc5d0 \ud50c\ub77c\ub178 \uc6d0\uc5d0 \uac00\ubcf8 \uc0ac\ub78c \uc788\uc5b4?\\tcontradiction\\n\uadf8\uac83\uc758 \uc804\uccb4\uc801\uc778 \ud615\ud0dc\uc758 \uc6b0\uc544\ud568\uc740 \uc6b4\ud558 \uac74\ub108\ud3b8\uc5d0\uc11c \uac00\uc7a5 \uc798 \ubcfc \uc218 \uc788\ub2e4. \uc65c\ub0d0\ud558\uba74, \ub85c\ub9c8\uc5d0 \uc788\ub294 \uc131 \ubca0\ub4dc\ub85c\ucc98\ub7fc, \ub3d4\uc740 \uae38\ucb49\ud55c \ubcf8\ub2f9 \ub4a4\ub85c \ub354 \uac00\uae4c\uc6b4 \uacf3\uc5d0 \uc0ac\ub77c\uc9c0\uae30 \ub54c\ubb38\uc774\ub2e4.\\t\uc131 \ubca0\ub4dc\ub85c\uc758 \uae38\ucb49\ud55c \ubcf8\ub2f9\uc740 \ub3d4\uc744 \uac00\ub9b0\ub2e4.\\tentailment\\n\ub2f9\uc2e0\uc740 \uc218\ud2f4\uc774 \uc0b4\uc5d0 \uac15\ubc15\uc801\uc778 \uae30\uc068\uc744 \uac00\uc9c0\uace0 \ub204\ub4dc\ub97c \uadf8\ub9b4 \uac83\uc774\ub77c\uace0 \uc0dd\uac01\ud558\uaca0\uc9c0\ub9cc, \uc544\ub2c8\uc624; \uadf8\ub294 \uadf8\uc758 \ubaa8\ub4e0 \uacbd\ub825\uc5d0\uc11c \ub2e8 \ud55c \uc810\ub9cc\uc744 \uadf8\ub838\uace0, \uadf8\uac83\uc740 \uc0ac\uc18c\ud55c \uadf8\ub9bc\uc774\ub2e4.\\t\uadf8\ub294 \uadf8\uac83\uc774 \uadf8\ub97c \ubd88\ud3b8\ud558\uac8c \ub9cc\ub4e4\uc5c8\uae30 \ub54c\ubb38\uc5d0 \ud558\ub098\ub9cc \uadf8\ub838\ub2e4.\\tneutral\\n\uc774 \uc778\uc0c1\uc801\uc778 \ud48d\uacbd\uc740 \uc6d0\ub798 \ub098\ud3ec \ub808\uc628\uc774 \ub8e8\ube0c\ub974 \ubc15\ubb3c\uad00\uc758 \uce68\uc2e4\uc5d0\uc11c \ubcfc \uc218 \uc788\ub3c4\ub85d \uacc4\ud68d\ub418\uc5c8\ub294\ub370, \uadf8 \ub2f9\uc2dc \uad81\uc804\uc774\uc5c8\uc2b5\ub2c8\ub2e4.\\t\ub098\ud3f4\ub808\uc639\uc740 \uadf8\uc758 \ubaa8\ub4e0 \uad81\uc804\uc5d0 \uc788\ub294 \uadf8\uc758 \uce68\uc2e4\uc5d0\uc11c \ubcf4\ub294 \uacbd\uce58\uc5d0 \ub9ce\uc740 \uad00\uc2ec\uc744 \uac00\uc84c\ub2e4.\\tneutral\\n\uadf8\ub294 \uc6b0\ub9ac\uc5d0\uac8c \ubb38 \uc5f4\uc1e0\ub97c \uac74\ub124\uc8fc\uace0\ub294 \uae09\ud788 \ub5a0\ub0ac\ub2e4.\\t\uadf8\ub294 \uae34\uc7a5\ud574\uc11c \uc6b0\ub9ac\uc5d0\uac8c \uc5f4\uc1e0\ub97c \ube68\ub9ac \uc8fc\uc5c8\ub2e4.\\tneutral\\n\uc704\uc6d0\ud68c\ub294 \ub610\ud55c \ucd5c\uc885 \uaddc\uce59\uc744 OMB\uc5d0 \uc81c\ucd9c\ud588\ub2e4.\\t\uc704\uc6d0\ud68c\ub294 \ub610\ud55c \uc774 \uaddc\uce59\uc744 \ub2e4\ub978 \uadf8\ub8f9\uc5d0 \uc81c\ucd9c\ud588\uc9c0\ub9cc \ucd5c\uc885 \uaddc\uce59\uc740 OMB\uac00 \ud3c9\uac00\ud558\uae30 \uc704\ud55c \uac83\uc774 \uc5c8\uc2b5\ub2c8\ub2e4.\\tneutral\\n\uc815\uc6d0\uac00\uac8c\uc5d0 \uac00\ubcf4\uba74 \uc62c\ub9ac\ube44\uc544\uc758 \ubcf5\uc81c \ud654\ud569\ubb3c \uac19\uc740 \uc720\ucf8c\ud55c 
\uc774\ub984\uc744 \uac00\uc9c4 \uc81c\ud488\ub4e4\uc744 \ucc3e\uc744 \uc218 \uc788\uc744 \uac81\ub2c8\ub2e4.\uc774 \uc81c\ud488\uc774 \ubfcc\ub9ac\ub97c \ub0b4\ub9ac\ub3c4\ub85d \ub3d5\uae30 \uc704\ud574 \ucd2c\uc601\uc758 \uc808\ub2e8\ub41c \ub05d\uc5d0 \ub369\ud06c\uc29b\uc744 \ud558\ub294 \ud638\ub974\ubaac\uc758 \ud63c\ud569\ubb3c\uc774\uc8e0.\\t\uc815\uc6d0 \uac00\uafb8\uae30 \uac00\uac8c\uc758 \uc81c\ud488\ub4e4\uc740 \uc885\uc885 \uadf8\ub4e4\uc758 \ubaa9\uc801\uc744 \uc124\uba85\ud558\uae30 \uc704\ud574 \uae30\uc220\uc801\uc73c\ub85c\ub098 \uacfc\ud559\uc801\uc73c\ub85c \ud30c\uc0dd\ub41c \uc774\ub984(\uc62c\ub9ac\ube44\uc544\uc758 \ubcf5\uc81c \ud654\ud569\ubb3c\ucc98\ub7fc)\uc744 \ubd80\uc5ec\ubc1b\ub294\ub2e4.\\tneutral\\n\uc2a4\ud0c0\ub294 \uc2a4\ud2f8 \uc790\uc2e0\uc774\ub098 \uc65c \uadf8\ub140\uc758 \uc774\uc57c\uae30\ub97c \ubc14\uafb8\uc5c8\ub294\uc9c0\uc5d0 \ud6e8\uc52c \ub354 \uad00\uc2ec\uc774 \uc788\uc744 \uac83\uc774\ub2e4.\\t\uc2a4\ud2f8\uc758 \uc774\uc57c\uae30\ub294 \uc870\uae08\ub3c4 \ubcc0\ud558\uc9c0 \uc54a\uc558\ub2e4.\\tcontradiction\\n\ub0a8\ud3b8\uacfc\uc758 \ub9c8\uc9c0\ub9c9 \ub300\uacb0\ub85c \ub9e5\ud2f0\uc5b4\ub294 \ub178\ub77c\uc758 \ubcc0\uc2e0\uc744 \ub108\ubb34\ub098 \ub2a5\uc219\ud558\uac8c \uc608\uace0\ud574 \uc654\uae30 \ub54c\ubb38\uc5d0, \uadf8\ub140\uc5d0\uac8c\ub294 \ub2f9\ud669\uc2a4\ub7ec\uc6b8 \uc815\ub3c4\ub85c \uac11\uc791\uc2a4\ub7ec\uc6b4 \uac83\ucc98\ub7fc \ubcf4\uc774\uc9c0\ub9cc, \uc6b0\ub9ac\uc5d0\uac8c\ub294 \uac10\uc815\uc801\uc73c\ub85c \ubd88\uac00\ud53c\ud574 \ubcf4\uc778\ub2e4.\\t\ub178\ub77c\uc758 \ubcc0\uc2e0\uc740 \ubd84\uba85\ud558\uace0 \ud544\uc5f0\uc801\uc774\uc5c8\ub2e4.\\tcontradiction\\n\uc774\uc9d1\ud2b8 \ucd5c\ub0a8\ub2e8 \ub3c4\uc2dc\uc778 \uc544\uc2a4\uc644\uc740 \uc624\ub79c \uc5ed\uc0ac\ub97c \ud1b5\ud574 \uc911\uc694\ud55c \uc5ed\ud560\uc744 \ud574\uc654\ub2e4.\\t\uc544\uc2a4\uc644\uc740 \uc774\uc9d1\ud2b8 \uad6d\uacbd \ubc14\ub85c \uc704\uc5d0 \uc704\uce58\ud574 \uc788\uc2b5\ub2c8\ub2e4.\\tneutral\\n\uadf8\ub7ec\ub098 \ud6e8\uc52c \ub354 \uc6b0\uc544\ud55c \uac74\ucd95\uc801 \ud130\uce58\ub294 \uc2e0\uc131\ud55c \ucda4\uc778 Bharatanatyam\uc5d0\uc11c \uc218\ud589\ub41c 108 \uac00\uc9c0 \uae30\ubcf8 \ud3ec\uc988\ub97c \uc2dc\ubc14 \ud328\ub110\uc5d0\uc11c \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\\t\ud328\ub110\uc5d0 \ub300\ud55c \uc2dc\ubc14\uc758 \ubb18\uc0ac\ub294 \uc77c\ubc18\uc801\uc778 \ubaa8\ud2f0\ube0c\ub2e4.\\tneutral\\n\ud638\ud654\ub86d\uac8c \uc2ec\uc5b4\uc9c4 \uacc4\ub2e8\uc2dd \uc815\uc6d0\uc740 \uc774\ud0c8\ub9ac\uc544 \ud615\uc2dd\uc758 \uac00\uc7a5 \ud6cc\ub96d\ud55c \uc559\uc0c1\ube14 \uc911 \ud558\ub098\uc785\ub2c8\ub2e4.\\t\uc544\ub984\ub2e4\uc6b4 \uc815\uc6d0\uacfc \ud76c\uadc0\ud55c \uaf43\uaf42\uc774 \ubaa8\ub450 \uc774\ud0c8\ub9ac\uc544\uc758 \ud615\uc2dd\uc801\uc778 \uc2a4\ud0c0\uc77c\uc744 \ubcf4\uc5ec\uc900\ub2e4.\\tneutral\\n\uc74c, \uadf8\ub7ac\uc73c\uba74 \uc88b\uc558\uc744 \ud150\ub370\\t\ub098\ub294 \uadf8\uac83\uc744 \ub2e4\ub974\uac8c \ud560 \uae30\ud68c\ub97c \ubab9\uc2dc \uac08\ub9dd\ud55c\ub2e4.\\tentailment\\n\ud3d0\ud5c8\uac00 \ub41c \uc131\uc758 \uae30\uc2ad\uc5d0 \uc790\ub9ac\uc7a1\uace0 \uc788\ub294 \uc608\uc05c \uc911\uc138 \ub3c4\uc2dc \ucf00\uc774\uc11c\uc2a4\ubc84\uadf8\ub294 \ub178\ubca8 \ud3c9\ud654\uc0c1 \uc218\uc0c1\uc790 \uc54c\ubc84\ud2b8 \uc288\ubc14\uc774\ucc98(1875\ub144)\uc758 \ucd9c\uc0dd\uc9c0\ub85c \ub110\ub9ac \uc54c\ub824\uc838 \uc788\ub2e4.\\t\uc54c\ubc84\ud2b8 \uc288\ubc14\uc774\ucc98\ub294 \ub458 \ub2e4 \ucf00\uc774\uc11c\uc2a4\ubc84\uadf8 
\ub9c8\uc744\uc5d0 \uc788\uc5c8\ub2e4.\\tentailment\\n\uace0\uac10\ub3c4\ub294 \ubb38\uc81c\uac00 \uc788\ub294 \ub300\ubd80\ubd84\uc758 \ud658\uc790\ub4e4\uc774 \ubc1c\uacac\ub420 \uac83\uc744 \ubcf4\uc7a5\ud55c\ub2e4.\\t\uc7a5\ube44 \ubbfc\uac10\ub3c4\ub294 \ubb38\uc81c \ud0d0\uc9c0\uc640 \uad00\ub828\uc774 \uc5c6\uc2b5\ub2c8\ub2e4.\\tcontradiction\\n\uc624\ub298\uc740 \ud655\uc2e4\ud788 \ubc18\ubc14\uc9c0 \uac19\uc740 \ub0a0\uc774\uc5c8\uc5b4\\t\uc624\ub298 \uc0ac\ubb34\uc2e4\uc5d0 \uc788\ub294 \ubaa8\ub4e0 \uc0ac\ub78c\ub4e4\uc740 \ubc18\ubc14\uc9c0\ub97c \uc785\uc5c8\ub2e4.\\tneutral\\n\ubabb\uc0dd\uae34 \ud131\uc2dc\ub3c4\ub97c \uc785\uace0.\\t\uadf8\uac83\uc740 \ubd84\ud64d\uc0c9\uacfc \uc8fc\ud669\uc0c9\uc785\ub2c8\ub2e4.\\tneutral\\n\uc774\uc8fc \ub178\ub3d9 \uc218\uc6a9\uc18c \uc624 \ub9c8\uc774 \uac13 \uadf8\ub4e4\uc740 \ud310\uc9c0 \uc0c1\uc790\uc5d0 \uc0b0\ub2e4.\\t\ub178\ub3d9 \uc218\uc6a9\uc18c\uc5d0\ub294 \ud310\uc9c0 \uc0c1\uc790\uc5d0 \uc0ac\ub294 \uc774\uc8fc \ub178\ub3d9\uc790\ub4e4\uc758 \uc0ac\uc9c4\uc774 \uc788\ub2e4.\\tneutral\\n\uadf8\ub798, \uadf8\uac00 \uc804 \uc138\uacc4\ub97c \uc5ec\ud589\ud55c \ud6c4\uc5d0 \uadf8\ub7f0 \uac70\uc57c\\t\uadf8\uac83\uc740 \uc0ac\ub78c\ub4e4\uc758 \uc138\uacc4 \uc5ec\ud589\uc744 \ub530\ub978\ub2e4.\\tentailment\\n\uac74\ub108\ud3b8\uc5d0 \ud06c\uace0 \ud070 \ucc38\ub098\ubb34 \uba87 \uadf8\ub8e8\uac00 \uc788\ub2e4.\\t\uc6b0\ub9ac\ub294 \uc5ec\uae30 \uc624\ud06c\ub098 \uc5b4\ub5a4 \uc885\ub958\uc758 \ubbf8\uad6d \ub098\ubb34\ub3c4 \uc5c6\ub2e4.\\tcontradiction\\nFort-de-France\uc5d0\uc11c \ucd9c\ubc1c\ud558\ub294 \uc790\ub3d9\ucc28\ub098 \uc5ec\uac1d\uc120\uc73c\ub85c, \ub2f9\uc2e0\uc740 \uc548\uc138 ? \ubc14\ub2e4 \ud3ec\ub3c4\uac00 \uadf8\ub298\uc744 \uc81c\uacf5\ud558\ub294 \ucf8c\uc801\ud55c \uac08\uc0c9 \ubaa8\ub798 \ud574\ubcc0\uacfc \ud53c\ud06c\ub2c9 \ud14c\uc774\ube14, \uc5b4\ub9b0\uc774 \ubbf8\ub044\ub7fc\ud2c0, \uc2dd\ub2f9\uc774 \uc788\ub294 \uc548\ub290\uc5d0 \ub3c4\ucc29\ud560 \uc218 \uc788\ub2e4.\\t\ud504\ub791\uc2a4 \uc694\uc0c8\uc5d0\uc11c \uc790\ub3d9\ucc28\ub098 \ud398\ub9ac\ub97c \ud0c0\uace0 \uc548\uc138\ub85c \uac08 \uc218 \uc788\ub2e4.\\tentailment\\n\uadf8\ub9ac\uace0 \uadf8\uac83\uc740 \uc568\ub77c\ubc30\ub9c8\uc8fc\uac00 \uc608\uc0c1\ud588\ub358 \ub300\ub85c \uc608\uc0b0\uc5d0\uc11c 50\ub9cc \ub2ec\ub7ec\ub97c \uc0ad\uac10\ud558\uc9c0 \uc54a\uc744 \uac83\uc774\ub77c\ub294 \uac83\uc744 \uc758\ubbf8\ud55c\ub2e4.\\t\uc568\ub77c\ubc30\ub9c8 \uc8fc\ub294 \uc608\uc0b0 \uc0ad\uac10\uc744 \ud558\uc9c0 \uc54a\uc558\ub2e4. \uc65c\ub0d0\ud558\uba74 \uadf8\ub807\uac8c \ud558\ub294 \uac83\uc5d0 \ub300\ud55c \ucd08\uae30 \uc815\ub2f9\uc131\uc774 \uc815\ubc00 \uc870\uc0ac\uc5d0 \ub9de\uc11c\uc9c0 \uc54a\uc558\uae30 \ub54c\ubb38\uc774\ub2e4.\\tneutral\\n\uc54c\uc558\uc5b4 \uba3c\uc800 \uc5b4 .. \uc5b4 .. 
\ub178\uc778\uc774\ub098 \uac00\uc871\uc744 \uc694\uc591\uc6d0\uc5d0 \ubcf4\ub0b4\ub294 \uac83\uc5d0 \ub300\ud574 \uc5b4\ub5bb\uac8c \uc0dd\uac01\ud558\ub2c8?\\t\uac00\uc871\uc744 \uc694\uc591\uc6d0\uc5d0 \ubcf4\ub0b4\uc11c \uc0ac\ub294 \uac83\uc5d0 \ub300\ud574 \uc5b4\ub5bb\uac8c \uc0dd\uac01\ud558\ub294\uc9c0 \uc54c \ud544\uc694\uac00 \uc5c6\ub2e4.\\tcontradiction\\n\ub098\uba38\uc9c0\ub294 \ub108\uc5d0\uac8c \ub2ec\ub838\uc5b4.\\t\ub098\uba38\uc9c0\ub294 \ub108\uc5d0\uac8c \ub2ec\ub838\uc9c0\ub9cc \uc2dc\uac04\uc774 \ub9ce\uc9c0 \uc54a\ub2e4.\\tneutral\\n\uc74c-\ud760, 3\uc6d4\uc5d0 \ud587\ubcd5\uc5d0 \ud0c0\ub294 \uac83\uc5d0 \ub300\ud574 \uac71\uc815\ud558\uba74 \uc548 \ub41c\ub2e4\ub294 \uac83\uc744 \uc54c\uace0 \uc788\ub294 3\uc6d4\uc774\uc57c.\\t3\uc6d4\uc740 \uadf8\ub807\uac8c \ub365\uc9c0 \uc54a\ub2e4.\\tneutral\\n\uadf8\ub9ac\uace0 \uc5b4, \uadf8\ub7f0 \uc791\uc740 \uac83\ub4e4\ub85c \ub2e4\uc2dc \uc2dc\uc791\ud574\ubd10. \uc544\uc9c1 \ud6e8\uc52c \uc2f8. \uc5b4, \uadf8 \ud2b9\ubcc4\ud55c \ubaa8\ub378 \ucc28\ub294 150\ub2ec\ub7ec\uc57c.\\t\uadf8 \ubaa8\ud615\ucc28\ub294 4\ucc9c \ub2ec\ub7ec\uac00 \ub4e0\ub2e4.\\tcontradiction\\n\ub0b4\uc77c \ub3cc\uc544\uac00\uc57c \ud55c\ub2e4\uba74, \uce7c\uc774 \ub9d0\ud588\ub2e4.\\t\ub3cc\uc544\uac08 \uc218 \uc5c6\uc5b4. \uc624\ub298\uc740 \uc548 \ub3fc. \ub0b4\uc77c\uc740 \uc548 \ub3fc. \uc808\ub300 \uc548 \ub3fc.\" \uce7c\uc774 \ub9d0\ud588\ub2e4.', 'sentence2': 'contradiction'}\r\n ```\r\n\r\n2. (Optional) Preferred to change the name of the features for the compatibility with `run_glue.py` in \ud83e\udd17 Transformers\r\n - `kor_nli` dataset has same data structure of multi_nli, xnli\r\n - Changing the name of features and the feature type of 'gold_label' to ClassLabel might be helpful\r\n ```python\r\n def _info(self):\r\n return datasets.DatasetInfo(\r\n description=_DESCRIPTION,\r\n features=datasets.Features(\r\n {\r\n \"premise\": datasets.Value(\"string\"),\r\n \"hypothesis\": datasets.Value(\"string\"),\r\n \"label\": datasets.features.ClassLabel(names=[\"entailment\", \"neutral\", \"contradiction\"]),\r\n } \r\n ),\r\n ```\r\n\r\nIf you don't mind, I would like to fix this.\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/821\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/821\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/820","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/820\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/820\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/820\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/820","id":739387617,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE4MDYwMjQ0","number":820,"title":"Update quail dataset to 
v1.3","user":{"login":"ngdodd","id":4889636,"node_id":"MDQ6VXNlcjQ4ODk2MzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4889636?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ngdodd","html_url":"https:\/\/github.com\/ngdodd","followers_url":"https:\/\/api.github.com\/users\/ngdodd\/followers","following_url":"https:\/\/api.github.com\/users\/ngdodd\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ngdodd\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ngdodd\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ngdodd\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ngdodd\/orgs","repos_url":"https:\/\/api.github.com\/users\/ngdodd\/repos","events_url":"https:\/\/api.github.com\/users\/ngdodd\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ngdodd\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-09T21:49:26Z","updated_at":"2020-11-10T09:06:35Z","closed_at":"2020-11-10T09:06:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/820","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/820","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/820.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/820.patch","merged_at":"2020-11-10T09:06:35Z"},"body":"Updated quail to most recent version, to address the problem originally discussed [here](https:\/\/github.com\/huggingface\/datasets\/issues\/806).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/820\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/820\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/819","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/819\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/819\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/819\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/819","id":739250624,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE3OTQ2MjYy","number":819,"title":"Make save function use deterministic global vars 
order","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-09T18:12:03Z","updated_at":"2021-11-30T13:34:09Z","closed_at":"2020-11-11T15:20:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/819","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/819","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/819.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/819.patch","merged_at":"2020-11-11T15:20:50Z"},"body":"The `dumps` function need to be deterministic for the caching mechanism.\r\nHowever in #816 I noticed that one of dill's method to recursively check the globals of a function may return the globals in different orders each time it's used. 
To fix that, I sort the globals by key in the `globs` dictionary.\r\nI had to add a rectified `save_function` to the saving functions registry of the Pickler to make it work.\r\n\r\nThis should fix #816 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/819\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/819\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/818","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/818\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/818\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/818\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/818","id":739173861,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE3ODgzMzk0","number":818,"title":"Fix type hints pickling in python 3.6","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-09T16:27:47Z","updated_at":"2020-11-10T09:07:03Z","closed_at":"2020-11-10T09:07:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/818","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/818","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/818.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/818.patch","merged_at":"2020-11-10T09:07:01Z"},"body":"Type hints can't be properly pickled in python 3.6. This was causing errors in the `run_mlm.py` script from `transformers` with python 3.6.\r\n\r\nHowever, Cloudpickle proposed a [fix](https:\/\/github.com\/cloudpipe\/cloudpickle\/pull\/318\/files) to make it work anyway.\r\nThe idea is just to implement the pickling\/unpickling of parameterized type hints. There is one detail though: since in python 3.6 we can't use `isinstance` on type hints, we can't use the pickle saving functions registry directly. 
Therefore we just wrap the `save_global` method of the Pickler.\r\n\r\nThis should fix https:\/\/github.com\/huggingface\/transformers\/issues\/8212 for python 3.6 and make `run_mlm.py` support python 3.6\r\n\r\ncc @sgugger ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/818\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/818\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/817","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/817\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/817\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/817\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/817","id":739145369,"node_id":"MDU6SXNzdWU3MzkxNDUzNjk=","number":817,"title":"Add MRQA dataset","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-09T15:52:19Z","updated_at":"2020-12-04T15:44:42Z","closed_at":"2020-12-04T15:44:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** MRQA\r\n- **Description:** Collection of different (subsets of) QA datasets all converted to the same format to evaluate out-of-domain generalization (the datasets come from different domains, distributions, etc.). Some datasets are used for training and others are used for evaluation. 
This dataset was collected as part of MRQA 2019's shared task \r\n- **Paper:** https:\/\/arxiv.org\/abs\/1910.09753\r\n- **Data:** https:\/\/github.com\/mrqa\/MRQA-Shared-Task-2019\r\n- **Motivation:** Out-of-domain generalization is becoming (has become) a de facto evaluation for NLU systems\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/817\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/817\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/816","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/816\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/816\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/816\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/816","id":739102686,"node_id":"MDU6SXNzdWU3MzkxMDI2ODY=","number":816,"title":"[Caching] Dill globalvars() output order is not deterministic and can cause cache issues.","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-09T15:01:20Z","updated_at":"2020-11-11T15:20:50Z","closed_at":"2020-11-11T15:20:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Dill uses `dill.detect.globalvars` to get the globals used by a function in a recursive dump. `globalvars` returns a dictionary of all the globals that a dumped function needs. 
However the order of the keys in this dict is not deterministic and can cause caching issues.\r\n\r\nTo fix that one could register an implementation of dill's `save_function` in the `datasets` pickler that sorts the globals keys before dumping a function.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/816\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/816\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/815","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/815\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/815\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/815\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/815","id":738842092,"node_id":"MDU6SXNzdWU3Mzg4NDIwOTI=","number":815,"title":"Is dataset iterative or not?","user":{"login":"rabeehkarimimahabadi","id":73364383,"node_id":"MDQ6VXNlcjczMzY0Mzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73364383?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi","html_url":"https:\/\/github.com\/rabeehkarimimahabadi","followers_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-11-09T09:11:48Z","updated_at":"2020-11-10T10:50:03Z","closed_at":"2020-11-10T10:50:03Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI want to use your library for large-scale training, I am not sure if this is implemented as iterative datasets or not?\r\ncould you provide me with example how I can use datasets as iterative datasets?\r\nthanks","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/815\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/815\/timeline","performed_via_github_app":null} 
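A minimal sketch related to the "iterative" question just above, assuming the `imdb` dataset as a stand-in: a loaded `Dataset` is map-style rather than iterative, i.e. it supports `len()` and integer indexing, so it can be wrapped directly in a PyTorch `DataLoader`.

```python
from datasets import load_dataset
from torch.utils.data import DataLoader

dataset = load_dataset("imdb", split="train")
dataset.set_format(type="torch", columns=["label"])  # keep only the tensor-friendly column

loader = DataLoader(dataset, batch_size=8)  # map-style dataset: indexing + len() are enough
for batch in loader:
    print(batch["label"].shape)
    break
```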
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/814","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/814\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/814\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/814\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/814","id":738500443,"node_id":"MDU6SXNzdWU3Mzg1MDA0NDM=","number":814,"title":"Joining multiple datasets ","user":{"login":"rabeehkarimimahabadi","id":73364383,"node_id":"MDQ6VXNlcjczMzY0Mzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73364383?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi","html_url":"https:\/\/github.com\/rabeehkarimimahabadi","followers_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-08T16:19:30Z","updated_at":"2020-11-08T19:38:48Z","closed_at":"2020-11-08T19:38:48Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi\r\nI have multiple iterative datasets from your library with different size and I want to join them in a way that each datasets is sampled equally, so smaller datasets more, larger one less, could you tell me how to implement this in pytorch? 
thanks ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/814\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/814\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/813","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/813\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/813\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/813\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/813","id":738489852,"node_id":"MDU6SXNzdWU3Mzg0ODk4NTI=","number":813,"title":"How to implement DistributedSampler with datasets ","user":{"login":"rabeehkarimimahabadi","id":73364383,"node_id":"MDQ6VXNlcjczMzY0Mzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73364383?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi","html_url":"https:\/\/github.com\/rabeehkarimimahabadi","followers_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehkarimimahabadi\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-08T15:27:11Z","updated_at":"2021-10-25T10:36:39Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI am using your datasets to define my dataloaders, and I am training finetune_trainer.py in huggingface repo on them.\r\nI need a distributedSampler to be able to train the models on TPUs being able to distribute the load across the TPU cores. Could you tell me how I can implement the distribued sampler when using datasets in which datasets are iterative? To give you more context, I have multiple of datasets and I need to write sampler for this case. thanks. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/813\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/813\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/812","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/812\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/812\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/812\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/812","id":738340217,"node_id":"MDU6SXNzdWU3MzgzNDAyMTc=","number":812,"title":"Too much logging ","user":{"login":"dspoka","id":6183050,"node_id":"MDQ6VXNlcjYxODMwNTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6183050?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dspoka","html_url":"https:\/\/github.com\/dspoka","followers_url":"https:\/\/api.github.com\/users\/dspoka\/followers","following_url":"https:\/\/api.github.com\/users\/dspoka\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dspoka\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dspoka\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dspoka\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dspoka\/orgs","repos_url":"https:\/\/api.github.com\/users\/dspoka\/repos","events_url":"https:\/\/api.github.com\/users\/dspoka\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dspoka\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-11-07T23:56:30Z","updated_at":"2021-01-26T14:31:34Z","closed_at":"2020-11-16T17:06:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm doing this in the beginning of my script:\r\n\r\nfrom datasets.utils import logging as datasets_logging\r\ndatasets_logging.set_verbosity_warning()\r\n\r\nbut I'm still getting these logs:\r\n\r\n[2020-11-07 15:45:41,908][filelock][INFO] - Lock 139958278886176 acquired on \/home\/username\/.cache\/huggingface\/datasets\/cfe20ffaa80ef1c145a0a210d5b9cdce2b60002831e6ed0edc7ab9275d6f0d48.1bd4ccbce9de3dad0698d84674a19d6cc66a84db736a6398110bd196795dde7e.py.lock\r\n\r\n[2020-11-07 15:45:41,909][filelock][INFO] - Lock 139958278886176 released on \/home\/username\/.cache\/huggingface\/datasets\/cfe20ffaa80ef1c145a0a210d5b9cdce2b60002831e6ed0edc7ab9275d6f0d48.1bd4ccbce9de3dad0698d84674a19d6cc66a84db736a6398110bd196795dde7e.py.lock\r\n\r\nusing datasets version = 1.1.2","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/812\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/812\/timeline","performed_via_github_app":null} 
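For the excessive-logging report in issue 812 above, the `Lock ... acquired/released` lines come from the third-party `filelock` package rather than from `datasets` itself, so they are not affected by `set_verbosity_warning()`. A hedged workaround sketch (not necessarily the fix that was eventually shipped) is to raise that logger's level through the standard `logging` module:

```python
import logging

from datasets.utils import logging as datasets_logging

# Quiet the datasets library's own loggers, as in the report above.
datasets_logging.set_verbosity_warning()

# The filelock INFO messages are emitted by the separate `filelock` logger,
# which datasets' verbosity helpers do not control; silence it directly.
# (Workaround sketch; logger name assumed to be "filelock".)
logging.getLogger("filelock").setLevel(logging.WARNING)
```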
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/811","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/811\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/811\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/811\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/811","id":738280132,"node_id":"MDU6SXNzdWU3MzgyODAxMzI=","number":811,"title":"nlp viewer error","user":{"login":"jc-hou","id":30210529,"node_id":"MDQ6VXNlcjMwMjEwNTI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30210529?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jc-hou","html_url":"https:\/\/github.com\/jc-hou","followers_url":"https:\/\/api.github.com\/users\/jc-hou\/followers","following_url":"https:\/\/api.github.com\/users\/jc-hou\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jc-hou\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jc-hou\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jc-hou\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jc-hou\/orgs","repos_url":"https:\/\/api.github.com\/users\/jc-hou\/repos","events_url":"https:\/\/api.github.com\/users\/jc-hou\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jc-hou\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-07T17:08:58Z","updated_at":"2020-11-16T15:26:23Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello, \r\nwhen I select amazon_us_reviews in nlp viewer, it shows error.\r\nhttps:\/\/huggingface.co\/nlp\/viewer\/?dataset=amazon_us_reviews\r\n![image](https:\/\/user-images.githubusercontent.com\/30210529\/98447334-4aa81200-2124-11eb-9dca-82c3ab34ccc2.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/811\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/811\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/810","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/810\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/810\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/810\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/810","id":737878370,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE2ODQzMzQ3","number":810,"title":"Fix seqeval 
metric","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-06T16:11:43Z","updated_at":"2020-11-09T14:04:29Z","closed_at":"2020-11-09T14:04:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/810","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/810","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/810.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/810.patch","merged_at":"2020-11-09T14:04:27Z"},"body":"The current seqeval metric returns the following error when computed:\r\n```\r\n~\/.cache\/huggingface\/modules\/datasets_modules\/metrics\/seqeval\/78a944d83252b5a16c9a2e49f057f4c6e02f18cc03349257025a8c9aea6524d8\/seqeval.py in _compute(self, predictions, references, suffix)\r\n 102 scores = {}\r\n 103 for type_name, score in report.items():\r\n--> 104 scores[type_name][\"precision\"] = score[\"precision\"]\r\n 105 scores[type_name][\"recall\"] = score[\"recall\"]\r\n 106 scores[type_name][\"f1\"] = score[\"f1-score\"]\r\n\r\nKeyError: 'LOC'\r\n```\r\nThis is because the current code basically tries to do:\r\n```\r\nscores = {}\r\nscores[\"LOC\"][\"precision\"] = some_value\r\n```\r\nwhich does not work in python. 
This PR fixes that while keeping the previous nested structure of results, with the same keys.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/810\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/810\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/809","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/809\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/809\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/809\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/809","id":737832701,"node_id":"MDU6SXNzdWU3Mzc4MzI3MDE=","number":809,"title":"Add Google Taskmaster dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-06T15:10:41Z","updated_at":"2021-04-20T13:09:26Z","closed_at":"2021-04-20T13:09:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Taskmaster\r\n- **Description:** A large dataset of task-oriented dialogue with annotated goals (55K dialogues covering entertainment and travel reservations)\r\n- **Paper:** https:\/\/arxiv.org\/abs\/1909.05358\r\n- **Data:** https:\/\/github.com\/google-research-datasets\/Taskmaster\r\n- **Motivation:** One of few annotated datasets of this size for goal-oriented dialogue\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/809\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/809\/timeline","performed_via_github_app":null} 
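The `KeyError: 'LOC'` quoted in PR 810 above arises because the code assigns into `scores[type_name][...]` before `scores[type_name]` exists. A small sketch of the kind of fix the PR describes is shown below; the `report` values are made up for illustration, and this is not the exact patched code.

```python
# Stand-in for seqeval's per-entity-type classification report; values invented.
report = {
    "LOC": {"precision": 0.90, "recall": 0.80, "f1-score": 0.85},
    "PER": {"precision": 0.70, "recall": 0.60, "f1-score": 0.65},
}

# scores["LOC"]["precision"] = ... raises KeyError because scores["LOC"] was
# never created; building each inner dict in one step keeps the same nested
# structure and keys without the error.
scores = {
    type_name: {
        "precision": score["precision"],
        "recall": score["recall"],
        "f1": score["f1-score"],
    }
    for type_name, score in report.items()
}
```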
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/808","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/808\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/808\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/808\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/808","id":737638942,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE2NjQ0NDc0","number":808,"title":"dataset(dgs): initial dataset loading script","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-06T10:14:43Z","updated_at":"2021-03-23T06:18:55Z","closed_at":"2021-03-23T06:18:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/808","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/808","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/808.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/808.patch","merged_at":null},"body":"When trying to create dummy data I get:\r\n\r\n> Dataset datasets with config None seems to already open files in the method `_split_generators(...)`. You might consider to instead only open files in the method `_generate_examples(...)` instead. If this is not possible the dummy data has t o be created with less guidance. 
Make sure you create the file dummy_data.\r\n\r\nI am not sure how to manually create the dummy_data (what exactly it should contain)\r\n\r\nAlso note, this library says:\r\n> ImportError: To be able to use this dataset, you need to install the following dependencies['pympi'] using 'pip install pympi' for instance'\r\n\r\nWhen you actually need to `pip install pympi-ling`\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/808\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/808\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/807","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/807\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/807\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/807\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/807","id":737509954,"node_id":"MDU6SXNzdWU3Mzc1MDk5NTQ=","number":807,"title":"load_dataset for LOCAL CSV files report CONNECTION ERROR","user":{"login":"shexuan","id":25664170,"node_id":"MDQ6VXNlcjI1NjY0MTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25664170?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shexuan","html_url":"https:\/\/github.com\/shexuan","followers_url":"https:\/\/api.github.com\/users\/shexuan\/followers","following_url":"https:\/\/api.github.com\/users\/shexuan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shexuan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shexuan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shexuan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shexuan\/orgs","repos_url":"https:\/\/api.github.com\/users\/shexuan\/repos","events_url":"https:\/\/api.github.com\/users\/shexuan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shexuan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-11-06T06:33:04Z","updated_at":"2021-01-11T01:30:27Z","closed_at":"2020-11-14T05:30:34Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## load_dataset for LOCAL CSV files report CONNECTION ERROR\r\n- **Description:** \r\nA local demo csv file:\r\n```\r\nimport pandas as pd\r\nimport numpy as np\r\nfrom datasets import load_dataset\r\nimport torch\r\nimport transformers\r\n\r\ndf = pd.DataFrame(np.arange(1200).reshape(300,4))\r\ndf.to_csv('test.csv', header=False, index=False)\r\n\r\nprint('datasets version: ', datasets.__version__)\r\nprint('pytorch version: ', torch.__version__)\r\nprint('transformers version: ', transformers.__version__)\r\n\r\n# output:\r\ndatasets version: 1.1.2\r\npytorch version: 1.5.0\r\ntransformers version: 3.2.0\r\n```\r\n\r\nwhen I load data through `dataset`:\r\n```\r\ndataset = load_dataset('csv', data_files='.\/test.csv', delimiter=',', autogenerate_column_names=False)\r\n```\r\nError infos:\r\n```\r\nConnectionError Traceback (most recent call last)\r\n in \r\n----> 1 dataset = load_dataset('csv', data_files='.\/test.csv', 
delimiter=',', autogenerate_column_names=False)\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, save_infos, script_version, **config_kwargs)\r\n 588 # Download\/copy dataset processing script\r\n 589 module_path, hash = prepare_module(\r\n--> 590 path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n 591 )\r\n 592 \r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, **download_kwargs)\r\n 266 file_path = hf_github_url(path=path, name=name, dataset=dataset, version=script_version)\r\n 267 try:\r\n--> 268 local_path = cached_path(file_path, download_config=download_config)\r\n 269 except FileNotFoundError:\r\n 270 if script_version is not None:\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py in cached_path(url_or_filename, download_config, **download_kwargs)\r\n 306 user_agent=download_config.user_agent,\r\n 307 local_files_only=download_config.local_files_only,\r\n--> 308 use_etag=download_config.use_etag,\r\n 309 )\r\n 310 elif os.path.exists(url_or_filename):\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag)\r\n 473 elif response is not None and response.status_code == 404:\r\n 474 raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\n--> 475 raise ConnectionError(\"Couldn't reach {}\".format(url))\r\n 476 \r\n 477 # Try a second time\r\n\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.2\/datasets\/csv\/csv.py\r\n```\r\n\r\nAnd I try to connect to the site with requests:\r\n```\r\nimport requests\r\n\r\nrequests.head(\"https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.2\/datasets\/csv\/csv.py\")\r\n```\r\n\r\nSimilarly Error occurs:\r\n```\r\n---------------------------------------------------------------------------\r\nConnectionRefusedError Traceback (most recent call last)\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/urllib3\/connection.py in _new_conn(self)\r\n 159 conn = connection.create_connection(\r\n--> 160 (self._dns_host, self.port), self.timeout, **extra_kw\r\n 161 )\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/urllib3\/util\/connection.py in create_connection(address, timeout, source_address, socket_options)\r\n 83 if err is not None:\r\n---> 84 raise err\r\n 85 \r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/urllib3\/util\/connection.py in create_connection(address, timeout, source_address, socket_options)\r\n 73 sock.bind(source_address)\r\n---> 74 sock.connect(sa)\r\n 75 return sock\r\n\r\nConnectionRefusedError: [Errno 111] Connection refused\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nNewConnectionError Traceback (most recent call last)\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/urllib3\/connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\r\n 676 headers=headers,\r\n--> 677 chunked=chunked,\r\n 678 
)\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/urllib3\/connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)\r\n 380 try:\r\n--> 381 self._validate_conn(conn)\r\n 382 except (SocketTimeout, BaseSSLError) as e:\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/urllib3\/connectionpool.py in _validate_conn(self, conn)\r\n 975 if not getattr(conn, \"sock\", None): # AppEngine might not have `.sock`\r\n--> 976 conn.connect()\r\n 977 \r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/urllib3\/connection.py in connect(self)\r\n 307 # Add certificate verification\r\n--> 308 conn = self._new_conn()\r\n 309 hostname = self.host\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/urllib3\/connection.py in _new_conn(self)\r\n 171 raise NewConnectionError(\r\n--> 172 self, \"Failed to establish a new connection: %s\" % e\r\n 173 )\r\n\r\nNewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nMaxRetryError Traceback (most recent call last)\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/requests\/adapters.py in send(self, request, stream, timeout, verify, cert, proxies)\r\n 448 retries=self.max_retries,\r\n--> 449 timeout=timeout\r\n 450 )\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/urllib3\/connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\r\n 724 retries = retries.increment(\r\n--> 725 method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]\r\n 726 )\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/urllib3\/util\/retry.py in increment(self, method, url, response, error, _pool, _stacktrace)\r\n 438 if new_retry.is_exhausted():\r\n--> 439 raise MaxRetryError(_pool, url, error or ResponseError(cause))\r\n 440 \r\n\r\nMaxRetryError: HTTPSConnectionPool(host='raw.githubusercontent.com', port=443): Max retries exceeded with url: \/huggingface\/datasets\/1.1.2\/datasets\/csv\/csv.py (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused',))\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nConnectionError Traceback (most recent call last)\r\n in \r\n 1 import requests\r\n 2 \r\n----> 3 requests.head(\"https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.2\/datasets\/csv\/csv.py\")\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/requests\/api.py in head(url, **kwargs)\r\n 102 \r\n 103 kwargs.setdefault('allow_redirects', False)\r\n--> 104 return request('head', url, **kwargs)\r\n 105 \r\n 106 \r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/requests\/api.py in request(method, url, **kwargs)\r\n 59 # cases, and look like a memory leak in others.\r\n 60 with sessions.Session() as session:\r\n---> 61 return session.request(method=method, url=url, **kwargs)\r\n 62 \r\n 63 \r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/requests\/sessions.py in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)\r\n 528 }\r\n 529 send_kwargs.update(settings)\r\n--> 530 resp = self.send(prep, **send_kwargs)\r\n 531 \r\n 532 return resp\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/requests\/sessions.py in send(self, request, 
**kwargs)\r\n 641 \r\n 642 # Send the request\r\n--> 643 r = adapter.send(request, **kwargs)\r\n 644 \r\n 645 # Total elapsed time of the request (approximately)\r\n\r\n~\/.conda\/envs\/py36\/lib\/python3.6\/site-packages\/requests\/adapters.py in send(self, request, stream, timeout, verify, cert, proxies)\r\n 514 raise SSLError(e, request=request)\r\n 515 \r\n--> 516 raise ConnectionError(e, request=request)\r\n 517 \r\n 518 except ClosedPoolError as e:\r\n\r\nConnectionError: HTTPSConnectionPool(host='raw.githubusercontent.com', port=443): Max retries exceeded with url: \/huggingface\/datasets\/1.1.2\/datasets\/csv\/csv.py (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused',))\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/807\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/807\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/806","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/806\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/806\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/806\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/806","id":737215430,"node_id":"MDU6SXNzdWU3MzcyMTU0MzA=","number":806,"title":"Quail dataset urls are out of date","user":{"login":"ngdodd","id":4889636,"node_id":"MDQ6VXNlcjQ4ODk2MzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4889636?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ngdodd","html_url":"https:\/\/github.com\/ngdodd","followers_url":"https:\/\/api.github.com\/users\/ngdodd\/followers","following_url":"https:\/\/api.github.com\/users\/ngdodd\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ngdodd\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ngdodd\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ngdodd\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ngdodd\/orgs","repos_url":"https:\/\/api.github.com\/users\/ngdodd\/repos","events_url":"https:\/\/api.github.com\/users\/ngdodd\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ngdodd\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-05T19:40:19Z","updated_at":"2020-11-10T14:02:51Z","closed_at":"2020-11-10T14:02:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"
<h3>
Code<\/h3>\r\n\r\n```\r\nfrom datasets import load_dataset\r\nquail = load_dataset('quail')\r\n```\r\n\r\n
<h3>
Error<\/h3>\r\n\r\n```\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/text-machine-lab\/quail\/master\/quail_v1.2\/xml\/ordered\/quail_1.2_train.xml\r\n```\r\n\r\n\r\nAs per [quail v1.3 commit](https:\/\/github.com\/text-machine-lab\/quail\/commit\/506501cfa34d9ec6c042d31026ba6fea6bcec8ff) it looks like the location and suggested ordering has changed. In [https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/quail\/quail.py#L52-L58](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/quail\/quail.py#L52-L58) the quail v1.2 datasets are being pointed to, which don't exist anymore.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/806\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/806\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/805","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/805\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/805\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/805\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/805","id":737019360,"node_id":"MDU6SXNzdWU3MzcwMTkzNjA=","number":805,"title":"On loading a metric from datasets, I get the following error","user":{"login":"laibamehnaz","id":36405283,"node_id":"MDQ6VXNlcjM2NDA1Mjgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36405283?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/laibamehnaz","html_url":"https:\/\/github.com\/laibamehnaz","followers_url":"https:\/\/api.github.com\/users\/laibamehnaz\/followers","following_url":"https:\/\/api.github.com\/users\/laibamehnaz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/laibamehnaz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/laibamehnaz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/laibamehnaz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/laibamehnaz\/orgs","repos_url":"https:\/\/api.github.com\/users\/laibamehnaz\/repos","events_url":"https:\/\/api.github.com\/users\/laibamehnaz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/laibamehnaz\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-05T15:14:38Z","updated_at":"2020-11-09T09:12:35Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`from datasets import load_metric`\r\n\r\n`metric = load_metric('bleurt')`\r\n\r\nTraceback:\r\n210 class _ArrayXDExtensionType(pa.PyExtensionType):\r\n 211 \r\n 212 ndims: int = None\r\n\r\nAttributeError: module 'pyarrow' has no attribute 'PyExtensionType'\r\n\r\nAny help will be appreciated. Thank you. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/805\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/805\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/804","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/804\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/804\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/804\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/804","id":736858507,"node_id":"MDU6SXNzdWU3MzY4NTg1MDc=","number":804,"title":"Empty output\/answer in TriviaQA test set (both in 'kilt_tasks' and 'trivia_qa')","user":{"login":"PaulLerner","id":25532159,"node_id":"MDQ6VXNlcjI1NTMyMTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25532159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulLerner","html_url":"https:\/\/github.com\/PaulLerner","followers_url":"https:\/\/api.github.com\/users\/PaulLerner\/followers","following_url":"https:\/\/api.github.com\/users\/PaulLerner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulLerner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulLerner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulLerner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulLerner\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulLerner\/repos","events_url":"https:\/\/api.github.com\/users\/PaulLerner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulLerner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-05T11:38:01Z","updated_at":"2020-11-09T14:14:59Z","closed_at":"2020-11-09T14:14:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"# The issue\r\n\r\nIt's all in the title, it appears to be fine on the train and validation sets.\r\n\r\nIs there some kind of mapping to do like for the questions (see https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/kilt_tasks\/README.md) ? 
\r\n\r\n# How to reproduce\r\n```py\r\nfrom datasets import load_dataset\r\nkilt_tasks = load_dataset(\"kilt_tasks\")\r\ntrivia_qa = load_dataset('trivia_qa', 'unfiltered.nocontext')\r\n# both in \"kilt_tasks\"\r\nIn [18]: any([output['answer'] for output in kilt_tasks['test_triviaqa']['output']]) \r\nOut[18]: False\r\n# and \"trivia_qa\"\r\nIn [13]: all([answer['value'] == '' for answer in trivia_qa['test']['answer']]) \r\nOut[13]: True\r\n# appears to be fine on the train and validation sets.\r\nIn [14]: all([answer['value'] == '' for answer in trivia_qa['train']['answer']]) \r\nOut[14]: False\r\n\r\nIn [15]: all([answer['value'] == '' for answer in trivia_qa['validation']['answer']]) \r\nOut[15]: False\r\n\r\nIn [16]: any([output['answer'] for output in kilt_tasks['train_triviaqa']['output']]) \r\nOut[16]: True\r\n\r\nIn [17]: any([output['answer'] for output in kilt_tasks['validation_triviaqa']['output']]) \r\nOut[17]: True\r\n\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/804\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/804\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/803","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/803\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/803\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/803\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/803","id":736818917,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE1OTY1ODE2","number":803,"title":"fix: typos in tutorial to map KILT and 
TriviaQA","user":{"login":"PaulLerner","id":25532159,"node_id":"MDQ6VXNlcjI1NTMyMTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25532159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulLerner","html_url":"https:\/\/github.com\/PaulLerner","followers_url":"https:\/\/api.github.com\/users\/PaulLerner\/followers","following_url":"https:\/\/api.github.com\/users\/PaulLerner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulLerner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulLerner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulLerner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulLerner\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulLerner\/repos","events_url":"https:\/\/api.github.com\/users\/PaulLerner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulLerner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-05T10:42:00Z","updated_at":"2020-11-10T09:08:07Z","closed_at":"2020-11-10T09:08:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/803","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/803","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/803.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/803.patch","merged_at":"2020-11-10T09:08:07Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/803\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/803\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/802","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/802\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/802\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/802\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/802","id":736296343,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE1NTM1MDI0","number":802,"title":"Add 
XGlue","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-04T17:29:54Z","updated_at":"2020-12-01T15:58:28Z","closed_at":"2020-12-01T15:58:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/802","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/802","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/802.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/802.patch","merged_at":"2020-12-01T15:58:27Z"},"body":"Dataset is ready to merge. An important feature of this dataset is that for each config the train data is in English, while dev and test data are in multiple languages. Therefore, @lhoestq and I decided offline that we will give the dataset the following API, *e.g.* for \r\n\r\n```python\r\nload_dataset(\"xglue\", \"ner\") # would give the splits 'train', 'validation.en', 'test.en', 'validation.es', 'test.es', ... 
\r\n```\r\n=> therefore one can load a single language test via\r\n\r\n```python\r\nload_dataset(\"xglue\", \"ner\", split=\"test.es\")\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/802\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/802\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/801","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/801\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/801\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/801\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/801","id":735790876,"node_id":"MDU6SXNzdWU3MzU3OTA4NzY=","number":801,"title":"How to join two datasets?","user":{"login":"shangw-nvidia","id":66387198,"node_id":"MDQ6VXNlcjY2Mzg3MTk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/66387198?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shangw-nvidia","html_url":"https:\/\/github.com\/shangw-nvidia","followers_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/followers","following_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/orgs","repos_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/repos","events_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shangw-nvidia\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-04T03:53:11Z","updated_at":"2020-12-23T14:02:58Z","closed_at":"2020-12-23T14:02:58Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI'm wondering if it's possible to join two (preprocessed) datasets with the same number of rows but different labels? 
\r\n\r\nI'm currently trying to create paired sentences for BERT from `wikipedia\/'20200501.en`, and I couldn't figure out a way to create a paired sentence using `.map()` where the second sentence is **not** the next sentence (i.e., from a different article) of the first sentence.\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/801\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/801\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/800","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/800\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/800\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/800\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/800","id":735772775,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE1MTAyMjc3","number":800,"title":"Update loading_metrics.rst","user":{"login":"ayushidalmia","id":5400513,"node_id":"MDQ6VXNlcjU0MDA1MTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5400513?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ayushidalmia","html_url":"https:\/\/github.com\/ayushidalmia","followers_url":"https:\/\/api.github.com\/users\/ayushidalmia\/followers","following_url":"https:\/\/api.github.com\/users\/ayushidalmia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ayushidalmia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ayushidalmia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ayushidalmia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ayushidalmia\/orgs","repos_url":"https:\/\/api.github.com\/users\/ayushidalmia\/repos","events_url":"https:\/\/api.github.com\/users\/ayushidalmia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ayushidalmia\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-04T02:57:11Z","updated_at":"2020-11-11T15:28:32Z","closed_at":"2020-11-11T15:28:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/800","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/800","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/800.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/800.patch","merged_at":"2020-11-11T15:28:32Z"},"body":"Minor bug","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/800\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/800\/timeline","performed_via_github_app":null} 
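One way to approach the pairing question in issue 801 above (a second sentence drawn from a different article) is `.map(..., with_indices=True)` with a partner row picked at a random other index. This is only a sketch: the `wikipedia` config comes from the issue itself, but the `text` column name, the new `paired_text` column, and the random-pairing policy are assumptions for illustration rather than an official recipe.

```python
import random

from datasets import load_dataset

wiki = load_dataset("wikipedia", "20200501.en", split="train")

def add_unrelated_partner(example, idx):
    # Choose an index different from idx so the paired text comes from
    # another article instead of the following sentence of the same one.
    other = random.randrange(len(wiki) - 1)
    if other >= idx:
        other += 1
    example["paired_text"] = wiki[other]["text"]  # column name assumed
    return example

paired = wiki.map(add_unrelated_partner, with_indices=True)
```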
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/799","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/799\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/799\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/799\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/799","id":735551165,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE0OTIzNDMx","number":799,"title":"switch amazon reviews class label order","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-03T18:38:58Z","updated_at":"2020-11-03T18:44:14Z","closed_at":"2020-11-03T18:44:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/799","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/799","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/799.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/799.patch","merged_at":"2020-11-03T18:44:10Z"},"body":"Switches the label order to be more intuitive for amazon reviews, #791.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/799\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/799\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/798","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/798\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/798\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/798\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/798","id":735518805,"node_id":"MDU6SXNzdWU3MzU1MTg4MDU=","number":798,"title":"Cannot load TREC dataset: 
ConnectionError","user":{"login":"kaletap","id":25740957,"node_id":"MDQ6VXNlcjI1NzQwOTU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25740957?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kaletap","html_url":"https:\/\/github.com\/kaletap","followers_url":"https:\/\/api.github.com\/users\/kaletap\/followers","following_url":"https:\/\/api.github.com\/users\/kaletap\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kaletap\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kaletap\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kaletap\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kaletap\/orgs","repos_url":"https:\/\/api.github.com\/users\/kaletap\/repos","events_url":"https:\/\/api.github.com\/users\/kaletap\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kaletap\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2020-11-03T17:45:22Z","updated_at":"2021-11-19T11:47:52Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Problem\r\nI cannot load \"trec\" dataset, it results with ConnectionError as shown below. I've tried on both Google Colab and locally. \r\n* `requests.head('http:\/\/cogcomp.org\/Data\/QA\/QC\/train_5500.label')` returns . \r\n* `requests.head('http:\/\/cogcomp.org\/Data\/QA\/QC\/train_5500.label', allow_redirects=True)` raises `requests.exceptions.TooManyRedirects: Exceeded 30 redirects.`\r\n* Opening `http:\/\/cogcomp.org\/Data\/QA\/QC\/train_5500.label' in a browser works, but opens a different address\r\n* Increasing max_redirects to 100 doesn't help\r\n\r\nAlso, while debugging I've seen that requesting 'https:\/\/storage.googleapis.com\/huggingface-nlp\/cache\/datasets\/trec\/default\/1.1.0\/dataset_info.json' returns before, but it doesn't raise any errors. 
Not sure if that's relevant.\r\n\r\n* datasets.__version__ == '1.1.2'\r\n* requests.__version__ == '2.24.0'\r\n\r\n## Error trace\r\n```\r\n>>> import datasets\r\n>>> datasets.__version__\r\n'1.1.2'\r\n>>> dataset = load_dataset(\"trec\", split=\"train\")\r\nUsing custom data configuration default\r\nDownloading and preparing dataset trec\/default (download: 350.79 KiB, generated: 403.39 KiB, post-processed: Unknown size, total: 754.18 KiB) to \/home\/przemyslaw\/.cache\/huggingface\/datasets\/trec\/default\/1.1.0\/ca4248481ad244f235f4cf277186cad2ee8769f975119a2bbfc41b8932b88bd7...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/przemyslaw\/.local\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/home\/przemyslaw\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/przemyslaw\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 531, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/przemyslaw\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/trec\/ca4248481ad244f235f4cf277186cad2ee8769f975119a2bbfc41b8932b88bd7\/trec.py\", line 140, in _split_generators\r\n dl_files = dl_manager.download_and_extract(_URLs)\r\n File \"\/home\/przemyslaw\/.local\/lib\/python3.6\/site-packages\/datasets\/utils\/download_manager.py\", line 254, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/home\/przemyslaw\/.local\/lib\/python3.6\/site-packages\/datasets\/utils\/download_manager.py\", line 179, in download\r\n num_proc=download_config.num_proc,\r\n File \"\/home\/przemyslaw\/.local\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/home\/przemyslaw\/.local\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in \r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/home\/przemyslaw\/.local\/lib\/python3.6\/site-packages\/datasets\/utils\/py_utils.py\", line 163, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/home\/przemyslaw\/.local\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File \"\/home\/przemyslaw\/.local\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 475, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach http:\/\/cogcomp.org\/Data\/QA\/QC\/train_5500.label\r\n```\r\n\r\nI would appreciate some suggestions here. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/798\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/798\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/797","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/797\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/797\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/797\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/797","id":735420332,"node_id":"MDU6SXNzdWU3MzU0MjAzMzI=","number":797,"title":"Token classification labels are strings and we don't have the list of labels","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067401494,"node_id":"MDU6TGFiZWwyMDY3NDAxNDk0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Dataset%20discussion","name":"Dataset discussion","color":"72f99f","default":false,"description":"Discussions on the datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-03T15:33:30Z","updated_at":"2020-11-10T14:07:11Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Not sure if this is an issue we want to fix or not, putting it here so it's not forgotten. Right now, in token classification datasets, the labels for NER, POS and the likes are typed as `Sequence` of `strings`, which is wrong in my opinion. 
These should be `Sequence` of `ClassLabel` or some types that gives easy access to the underlying labels.\r\n\r\nThe main problem for preprocessing those datasets is that the list of possible labels is not stored inside the `Dataset` object which makes converting the labels to IDs quite difficult (you either have to know the list of labels in advance or run a full pass through the dataset to get the list of labels, the `unique` method being useless with the type `Sequence[str]`).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/797\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/797\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/796","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/796\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/796\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/796\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/796","id":735414881,"node_id":"MDU6SXNzdWU3MzU0MTQ4ODE=","number":796,"title":"Seq2Seq Metrics QOL: Bleu, Rouge","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-11-03T15:26:29Z","updated_at":"2021-01-28T14:13:48Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Putting all my QOL issues here, idt I will have time to propose fixes, but I didn't want these to be lost, in case they are useful. 
I tried using `rouge` and `bleu` for the first time and wrote down everything I didn't immediately understand:\r\n\r\n+ Bleu expects tokenization, can I just kwarg it like sacrebleu?\r\n+ different signatures, means that I would have had to add a lot of conditionals + pre and post processing: if I were going to replace the `calculate_rouge` and `calculate_bleu` functions here: https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/seq2seq\/utils.py#L61\r\n\r\n\r\n#### What I tried\r\n\r\n\r\nRouge experience:\r\n```python\r\n\r\nrouge = load_metric('rouge')\r\nrouge.add_batch(['hi im sam'], ['im daniel']) # fails\r\nrouge.add_batch(predictions=['hi im sam'], references=['im daniel']) # works\r\nrouge.compute() # huge messy output, but reasonable. Not worth integrating b\/c don't want to rewrite all the postprocessing.\r\n```\r\n\r\nBLEU experience:\r\n```python\r\nbleu = load_metric('bleu')\r\nbleu.add_batch(predictions=['hi im sam'], references=['im daniel'])\r\nbleu.add_batch(predictions=[['hi im sam']], references=[['im daniel']])\r\n\r\nbleu.add_batch(predictions=[['hi im sam']], references=[['im daniel']])\r\n```\r\nAll of these raise `ValueError: Got a string but expected a list instead: 'im daniel'`\r\n\r\n#### Doc Typo\r\nThis says `dataset=load_metric(...)` which seems wrong, will cause `NameError`\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/6045025\/98004483-ff0d0580-1dbd-11eb-9f35-6f35904611bb.png)\r\n\r\ncc @lhoestq, feel free to ignore.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/796\/reactions","total_count":6,"+1":6,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/796\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/795","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/795\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/795\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/795\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/795","id":735198265,"node_id":"MDU6SXNzdWU3MzUxOTgyNjU=","number":795,"title":"Descriptions of raw and processed versions of wikitext are 
inverted","user":{"login":"fraboniface","id":16835358,"node_id":"MDQ6VXNlcjE2ODM1MzU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16835358?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/fraboniface","html_url":"https:\/\/github.com\/fraboniface","followers_url":"https:\/\/api.github.com\/users\/fraboniface\/followers","following_url":"https:\/\/api.github.com\/users\/fraboniface\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/fraboniface\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/fraboniface\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/fraboniface\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/fraboniface\/orgs","repos_url":"https:\/\/api.github.com\/users\/fraboniface\/repos","events_url":"https:\/\/api.github.com\/users\/fraboniface\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/fraboniface\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-03T10:24:51Z","updated_at":"2020-11-10T14:05:45Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Nothing of importance, but it looks like the descriptions of wikitext-n-v1 and wikitext-n-raw-v1 are inverted for both n=2 and n=103. I just verified by loading them and the `` tokens are present in the non-raw versions, which confirms that it's a mere inversion of the descriptions and not of the datasets themselves.\r\n\r\nAlso it would be nice if those descriptions appeared in the dataset explorer.\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/87bd0864845ea0a1dd7167918dc5f341bf807bd3\/datasets\/wikitext\/wikitext.py#L52","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/795\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/795\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/794","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/794\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/794\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/794\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/794","id":735158725,"node_id":"MDU6SXNzdWU3MzUxNTg3MjU=","number":794,"title":"self.options cannot be converted to a Python object for 
pickling","user":{"login":"hzqjyyx","id":9635713,"node_id":"MDQ6VXNlcjk2MzU3MTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9635713?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hzqjyyx","html_url":"https:\/\/github.com\/hzqjyyx","followers_url":"https:\/\/api.github.com\/users\/hzqjyyx\/followers","following_url":"https:\/\/api.github.com\/users\/hzqjyyx\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hzqjyyx\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hzqjyyx\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hzqjyyx\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hzqjyyx\/orgs","repos_url":"https:\/\/api.github.com\/users\/hzqjyyx\/repos","events_url":"https:\/\/api.github.com\/users\/hzqjyyx\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hzqjyyx\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-03T09:27:34Z","updated_at":"2020-11-19T17:35:38Z","closed_at":"2020-11-19T17:35:38Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nCurrently I am trying to load csv file with customized read_options. And the latest master seems broken if we pass the ReadOptions object.\r\n\r\nHere is a code snippet\r\n```python\r\nfrom datasets import load_dataset\r\nfrom pyarrow.csv import ReadOptions\r\nload_dataset(\"csv\", data_files=[\"out.csv\"], read_options=ReadOptions(block_size=16*1024*1024))\r\n```\r\nerror is `self.options cannot be converted to a Python object for pickling`\r\nWould you mind to take a look? 
Thanks!\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n----> 1 load_dataset(\"csv\", data_files=[\"out.csv\"], read_options=ReadOptions(block_size=16*1024*1024))\r\n\r\n\/tmp\/datasets\/src\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, save_infos, script_version, **config_kwargs)\r\n 602 hash=hash,\r\n 603 features=features,\r\n--> 604 **config_kwargs,\r\n 605 )\r\n 606 \r\n\r\n\/tmp\/datasets\/src\/datasets\/builder.py in __init__(self, cache_dir, name, hash, features, **config_kwargs)\r\n 162 name,\r\n 163 custom_features=features,\r\n--> 164 **config_kwargs,\r\n 165 )\r\n 166 \r\n\r\n\/tmp\/datasets\/src\/datasets\/builder.py in _create_builder_config(self, name, custom_features, **config_kwargs)\r\n 281 )\r\n 282 else:\r\n--> 283 suffix = Hasher.hash(config_kwargs_to_add_to_suffix)\r\n 284 \r\n 285 if builder_config.data_files is not None:\r\n\r\n\/tmp\/datasets\/src\/datasets\/fingerprint.py in hash(cls, value)\r\n 51 return cls.dispatch[type(value)](cls, value)\r\n 52 else:\r\n---> 53 return cls.hash_default(value)\r\n 54 \r\n 55 def update(self, value):\r\n\r\n\/tmp\/datasets\/src\/datasets\/fingerprint.py in hash_default(cls, value)\r\n 44 @classmethod\r\n 45 def hash_default(cls, value):\r\n---> 46 return cls.hash_bytes(dumps(value))\r\n 47 \r\n 48 @classmethod\r\n\r\n\/tmp\/datasets\/src\/datasets\/utils\/py_utils.py in dumps(obj)\r\n 365 file = StringIO()\r\n 366 with _no_cache_fields(obj):\r\n--> 367 dump(obj, file)\r\n 368 return file.getvalue()\r\n 369 \r\n\r\n\/tmp\/datasets\/src\/datasets\/utils\/py_utils.py in dump(obj, file)\r\n 337 def dump(obj, file):\r\n 338 \"\"\"pickle an object to a file\"\"\"\r\n--> 339 Pickler(file, recurse=True).dump(obj)\r\n 340 return\r\n 341 \r\n\r\n~\/.local\/lib\/python3.6\/site-packages\/dill\/_dill.py in dump(self, obj)\r\n 444 raise PicklingError(msg)\r\n 445 else:\r\n--> 446 StockPickler.dump(self, obj)\r\n 447 stack.clear() # clear record of 'recursion-sensitive' pickled objects\r\n 448 return\r\n\r\n\/usr\/lib\/python3.6\/pickle.py in dump(self, obj)\r\n 407 if self.proto >= 4:\r\n 408 self.framer.start_framing()\r\n--> 409 self.save(obj)\r\n 410 self.write(STOP)\r\n 411 self.framer.end_framing()\r\n\r\n\/usr\/lib\/python3.6\/pickle.py in save(self, obj, save_persistent_id)\r\n 474 f = self.dispatch.get(t)\r\n 475 if f is not None:\r\n--> 476 f(self, obj) # Call unbound method with explicit self\r\n 477 return\r\n 478 \r\n\r\n~\/.local\/lib\/python3.6\/site-packages\/dill\/_dill.py in save_module_dict(pickler, obj)\r\n 931 # we only care about session the first pass thru\r\n 932 pickler._session = False\r\n--> 933 StockPickler.save_dict(pickler, obj)\r\n 934 log.info(\"# D2\")\r\n 935 return\r\n\r\n\/usr\/lib\/python3.6\/pickle.py in save_dict(self, obj)\r\n 819 \r\n 820 self.memoize(obj)\r\n--> 821 self._batch_setitems(obj.items())\r\n 822 \r\n 823 dispatch[dict] = save_dict\r\n\r\n\/usr\/lib\/python3.6\/pickle.py in _batch_setitems(self, items)\r\n 850 k, v = tmp[0]\r\n 851 save(k)\r\n--> 852 save(v)\r\n 853 write(SETITEM)\r\n 854 # else tmp is empty, and we're done\r\n\r\n\/usr\/lib\/python3.6\/pickle.py in save(self, obj, save_persistent_id)\r\n 494 reduce = getattr(obj, \"__reduce_ex__\", None)\r\n 495 if reduce is not None:\r\n--> 496 rv = reduce(self.proto)\r\n 497 else:\r\n 498 reduce = getattr(obj, \"__reduce__\", 
None)\r\n\r\n~\/.local\/lib\/python3.6\/site-packages\/pyarrow\/_csv.cpython-36m-x86_64-linux-gnu.so in pyarrow._csv.ReadOptions.__reduce_cython__()\r\n\r\nTypeError: self.options cannot be converted to a Python object for pickling\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/794\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/794\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/793","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/793\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/793\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/793\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/793","id":735105907,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE0NTU2NzY5","number":793,"title":"[Datasets] fix discofuse links","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-03T08:03:45Z","updated_at":"2020-11-03T08:16:41Z","closed_at":"2020-11-03T08:16:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/793","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/793","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/793.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/793.patch","merged_at":"2020-11-03T08:16:40Z"},"body":"The discofuse links were changed: https:\/\/github.com\/google-research-datasets\/discofuse\/commit\/d27641016eb5b3eb2af03c7415cfbb2cbebe8558. 
\r\nThe old links are broken\r\n\r\nI changed the links and created the new dataset_infos.json.\r\n\r\nPinging @thomwolf @lhoestq for notification.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/793\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/793\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/792","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/792\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/792\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/792\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/792","id":734693652,"node_id":"MDU6SXNzdWU3MzQ2OTM2NTI=","number":792,"title":"KILT dataset: empty string in triviaqa input field","user":{"login":"PaulLerner","id":25532159,"node_id":"MDQ6VXNlcjI1NTMyMTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25532159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulLerner","html_url":"https:\/\/github.com\/PaulLerner","followers_url":"https:\/\/api.github.com\/users\/PaulLerner\/followers","following_url":"https:\/\/api.github.com\/users\/PaulLerner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulLerner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulLerner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulLerner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulLerner\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulLerner\/repos","events_url":"https:\/\/api.github.com\/users\/PaulLerner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulLerner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-02T17:33:54Z","updated_at":"2020-11-05T10:34:59Z","closed_at":"2020-11-05T10:34:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"# What happened\r\nBoth train and test splits of the triviaqa dataset (part of the KILT benchmark) seem to have empty string in their input field (unlike the natural questions dataset, part of the same benchmark)\r\n\r\n# Versions\r\nKILT version is `1.0.0`\r\n`datasets` version is `1.1.2`\r\n[more here](https:\/\/gist.github.com\/PaulLerner\/3768c8d25f723edbac20d99b6a4056c1)\r\n\r\n# How to reproduce\r\n```py\r\nIn [1]: from datasets import load_dataset\r\nIn [4]: dataset = load_dataset(\"kilt_tasks\") \r\n# everything works fine, removed output for a better readibility\r\nDataset kilt_tasks downloaded and prepared to \/people\/lerner\/.cache\/huggingface\/datasets\/kilt_tasks\/all_tasks\/1.0.0\/821c4295a2c35db2847585918d9c47d7f028f1a26b78825d8e77cd3aeb2621a1. 
Subsequent calls will reuse this data.\r\n\r\n# empty string in triviaqa input field\r\nIn [36]: dataset['train_triviaqa'][0] \r\nOut[36]: \r\n{'id': 'dpql_5197',\r\n 'input': '',\r\n 'meta': {'left_context': '',\r\n 'mention': '',\r\n 'obj_surface': {'text': []},\r\n 'partial_evidence': {'end_paragraph_id': [],\r\n 'meta': [],\r\n 'section': [],\r\n 'start_paragraph_id': [],\r\n 'title': [],\r\n 'wikipedia_id': []},\r\n 'right_context': '',\r\n 'sub_surface': {'text': []},\r\n 'subj_aliases': {'text': []},\r\n 'template_questions': {'text': []}},\r\n 'output': {'answer': ['five \u00a3', '5 \u00a3', '\u00a35', 'five \u00a3'],\r\n 'meta': [],\r\n 'provenance': [{'bleu_score': [1.0],\r\n 'end_character': [248],\r\n 'end_paragraph_id': [30],\r\n 'meta': [],\r\n 'section': ['Section::::Question of legal tender.\\n'],\r\n 'start_character': [246],\r\n 'start_paragraph_id': [30],\r\n 'title': ['Banknotes of the pound sterling'],\r\n 'wikipedia_id': ['270680']}]}}\r\nIn [35]: dataset['train_triviaqa']['input'][:10] \r\nOut[35]: ['', '', '', '', '', '', '', '', '', '']\r\n# same with test set \r\nIn [37]: dataset['test_triviaqa']['input'][:10] \r\nOut[37]: ['', '', '', '', '', '', '', '', '', '']\r\n# works fine with natural questions\r\nIn [34]: dataset['train_nq']['input'][:10] \r\nOut[34]: \r\n['how i.met your mother who is the mother',\r\n 'who had the most wins in the nfl',\r\n 'who played mantis guardians of the galaxy 2',\r\n 'what channel is the premier league on in france',\r\n \"god's not dead a light in the darkness release date\",\r\n 'who is the current president of un general assembly',\r\n 'when do the eclipse supposed to take place',\r\n 'what is the name of the sea surrounding dubai',\r\n 'who holds the nba record for most points in a career',\r\n 'when did the new maze runner movie come out']\r\n```\r\n\r\nStay safe :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/792\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/792\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/791","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/791\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/791\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/791\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/791","id":734656518,"node_id":"MDExOlB1bGxSZXF1ZXN0NTE0MTg0MzU5","number":791,"title":"add amazon 
reviews","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-11-02T16:42:57Z","updated_at":"2020-11-03T20:15:06Z","closed_at":"2020-11-03T16:43:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/791","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/791","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/791.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/791.patch","merged_at":"2020-11-03T16:43:57Z"},"body":"Adds the Amazon US Reviews dataset as requested in #353. Converted from [TensorFlow Datasets](https:\/\/www.tensorflow.org\/datasets\/catalog\/amazon_us_reviews). cc @clmnt @sshleifer ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/791\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":2,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/791\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/790","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/790\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/790\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/790\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/790","id":734470197,"node_id":"MDU6SXNzdWU3MzQ0NzAxOTc=","number":790,"title":"Error running pip install -e \".[dev]\" on MacOS 10.13.6: faiss\/python does not 
exist","user":{"login":"shawwn","id":59632,"node_id":"MDQ6VXNlcjU5NjMy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59632?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shawwn","html_url":"https:\/\/github.com\/shawwn","followers_url":"https:\/\/api.github.com\/users\/shawwn\/followers","following_url":"https:\/\/api.github.com\/users\/shawwn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shawwn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shawwn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shawwn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shawwn\/orgs","repos_url":"https:\/\/api.github.com\/users\/shawwn\/repos","events_url":"https:\/\/api.github.com\/users\/shawwn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shawwn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-11-02T12:36:35Z","updated_at":"2020-11-10T14:05:02Z","closed_at":"2020-11-10T14:05:02Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I was following along with https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html#adding-tests-and-metadata-to-the-dataset when I ran into this error.\r\n\r\n```sh\r\ngit clone https:\/\/github.com\/huggingface\/datasets\r\ncd datasets\r\nvirtualenv venv -p python3 --system-site-packages\r\nsource venv\/bin\/activate\r\npip install -e \".[dev]\"\r\n```\r\n\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/59632\/97868518-72871800-1cd5-11eb-9cd2-37d4e9d20b39.png)\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/59632\/97868592-977b8b00-1cd5-11eb-8f3c-0c409616149c.png)\r\n\r\nPython 3.7.7\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/790\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/790\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/789","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/789\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/789\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/789\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/789","id":734237839,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEzODM1MzE0","number":789,"title":"dataset(ncslgr): add initial loading 
script","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-11-02T06:50:10Z","updated_at":"2020-12-01T13:41:37Z","closed_at":"2020-12-01T13:41:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/789","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/789","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/789.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/789.patch","merged_at":null},"body":"Its a small dataset, but its heavily annotated\r\nhttps:\/\/www.bu.edu\/asllrp\/ncslgr.html\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/5757359\/97838609-3c539380-1ce9-11eb-885b-a15d4c91ea49.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/789\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/789\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/788","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/788\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/788\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/788\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/788","id":734136124,"node_id":"MDU6SXNzdWU3MzQxMzYxMjQ=","number":788,"title":"failed to reuse 
cache","user":{"login":"WangHexie","id":31768052,"node_id":"MDQ6VXNlcjMxNzY4MDUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31768052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/WangHexie","html_url":"https:\/\/github.com\/WangHexie","followers_url":"https:\/\/api.github.com\/users\/WangHexie\/followers","following_url":"https:\/\/api.github.com\/users\/WangHexie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/WangHexie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/WangHexie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/WangHexie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/WangHexie\/orgs","repos_url":"https:\/\/api.github.com\/users\/WangHexie\/repos","events_url":"https:\/\/api.github.com\/users\/WangHexie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/WangHexie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-11-02T02:42:36Z","updated_at":"2020-11-02T12:26:15Z","closed_at":"2020-11-02T12:26:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I packed the `load_dataset ` in a function of class, and cached data in a directory. But when I import the class and use the function, the data still have to be downloaded again. The information (Downloading and preparing dataset cnn_dailymail\/3.0.0 (download: 558.32 MiB, generated: 1.28 GiB, post-processed: Unknown size, total: 1.82 GiB) to ******) which logged to terminal shows the path is right to the cache directory, but the files still have to be downloaded again.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/788\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/788\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/787","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/787\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/787\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/787\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/787","id":734070162,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEzNjk5MTQz","number":787,"title":"Adding nli_tr 
dataset","user":{"login":"e-budur","id":2246791,"node_id":"MDQ6VXNlcjIyNDY3OTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2246791?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/e-budur","html_url":"https:\/\/github.com\/e-budur","followers_url":"https:\/\/api.github.com\/users\/e-budur\/followers","following_url":"https:\/\/api.github.com\/users\/e-budur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/e-budur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/e-budur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/e-budur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/e-budur\/orgs","repos_url":"https:\/\/api.github.com\/users\/e-budur\/repos","events_url":"https:\/\/api.github.com\/users\/e-budur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/e-budur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-11-01T21:49:44Z","updated_at":"2020-11-12T19:06:02Z","closed_at":"2020-11-12T19:06:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/787","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/787","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/787.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/787.patch","merged_at":"2020-11-12T19:06:02Z"},"body":"Hello,\r\n\r\nIn this pull request, we have implemented the necessary interface to add our recent dataset [NLI-TR](https:\/\/github.com\/boun-tabi\/NLI-TR). The datasets will be presented on a full paper at EMNLP 2020 this month. [[arXiv link] ](https:\/\/arxiv.org\/pdf\/2004.14963.pdf)\r\n\r\nThe dataset is the neural machine translation of SNLI and MultiNLI datasets into Turkish. So, we followed a similar format with the original datasets hosted in the HuggingFace datasets hub. \r\n\r\nOur dataset is designed to be accessed as follows by following the interface of the GLUE dataset that provides multiple datasets in a single interface over the HuggingFace datasets hub. 
\r\n\r\n```\r\nfrom datasets import load_dataset\r\nmultinli_tr = load_dataset(\"nli_tr\", \"multinli_tr\")\r\nsnli_tr = load_dataset(\"nli_tr\", \"snli_tr\")\r\n```\r\n\r\nThanks for your help in reviewing our pull request.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/787\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/787\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/786","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/786\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/786\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/786\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/786","id":733761717,"node_id":"MDU6SXNzdWU3MzM3NjE3MTc=","number":786,"title":"feat(dataset): multiprocessing _generate_examples","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-31T16:52:16Z","updated_at":"2020-11-09T08:38:38Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"forking this out of #741, this issue is only regarding multiprocessing\r\n\r\nI'd love if there was a dataset configuration parameter `workers`, where when it is `1` it behaves as it does right now, and when its `>1` maybe `_generate_examples` can also get the `pool` and return an iterable using the pool.\r\n\r\nIn my use case, I would instead of:\r\n```python\r\nfor datum in data:\r\n yield self.load_datum(datum)\r\n```\r\ndo:\r\n```python\r\nreturn pool.map(self.load_datum, data)\r\n```\r\n\r\nAs the dataset in question, as an example, has **only** 7000 rows, and takes 10 seconds to load each row on average, it takes almost 20 hours to load the entire dataset.\r\nIf this was a larger dataset (and many such datasets exist), it would take multiple days to complete.\r\n\r\nUsing multiprocessing, for example, 40 cores, could speed it up dramatically. 
For this dataset, hopefully to fully load in under an hour.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/786\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/786\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/785","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/785\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/785\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/785\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/785","id":733719419,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEzNDMyNTM1","number":785,"title":"feat(aslg_pc12): add dev and test data splits","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-10-31T13:25:38Z","updated_at":"2020-11-10T15:29:30Z","closed_at":"2020-11-10T15:29:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/785","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/785","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/785.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/785.patch","merged_at":null},"body":"For reproducibility sake, it's best if there are defined dev and test splits.\r\n\r\nThe original paper author did not define splits for the entire dataset, not for the sample loaded via this library, so I decided to define:\r\n- 5\/7th for train\r\n- 1\/7th for dev\r\n- 1\/7th for test\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/785\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/785\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/784","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/784\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/784\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/784\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/784","id":733700463,"node_id":"MDU6SXNzdWU3MzM3MDA0NjM=","number":784,"title":"Issue with downloading Wikipedia data for low resource language","user":{"login":"SamuelCahyawijaya","id":2826602,"node_id":"MDQ6VXNlcjI4MjY2MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2826602?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya","html_url":"https:\/\/github.com\/SamuelCahyawijaya","followers_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/followers","following_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/orgs","repos_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/repos","events_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-10-31T11:40:00Z","updated_at":"2022-02-09T17:50:16Z","closed_at":"2020-11-25T15:42:13Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I tried to download Sundanese and Javanese wikipedia data with the following snippet\r\n```\r\njv_wiki = datasets.load_dataset('wikipedia', '20200501.jv', beam_runner='DirectRunner')\r\nsu_wiki = datasets.load_dataset('wikipedia', '20200501.su', beam_runner='DirectRunner')\r\n```\r\nAnd I get the following error for these two languages:\r\nJavanese\r\n```\r\nFileNotFoundError: Couldn't find file at https:\/\/dumps.wikimedia.org\/jvwiki\/20200501\/dumpstatus.json\r\n```\r\n\r\nSundanese\r\n```\r\nFileNotFoundError: Couldn't find file at https:\/\/dumps.wikimedia.org\/suwiki\/20200501\/dumpstatus.json\r\n```\r\n\r\nI found from https:\/\/github.com\/huggingface\/datasets\/issues\/577#issuecomment-688435085 that for small languages, they are directly downloaded and parsed from the Wikipedia dump site, but both of `https:\/\/dumps.wikimedia.org\/jvwiki\/20200501\/dumpstatus.json` and `https:\/\/dumps.wikimedia.org\/suwiki\/20200501\/dumpstatus.json` are no longer valid.\r\n\r\n Any suggestions on how to handle this issue? 
Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/784\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/784\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/783","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/783\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/783\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/783\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/783","id":733536254,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEzMzAwODUz","number":783,"title":"updated links to v1.3 of quail, fixed the description","user":{"login":"annargrs","id":1450322,"node_id":"MDQ6VXNlcjE0NTAzMjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1450322?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/annargrs","html_url":"https:\/\/github.com\/annargrs","followers_url":"https:\/\/api.github.com\/users\/annargrs\/followers","following_url":"https:\/\/api.github.com\/users\/annargrs\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/annargrs\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/annargrs\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/annargrs\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/annargrs\/orgs","repos_url":"https:\/\/api.github.com\/users\/annargrs\/repos","events_url":"https:\/\/api.github.com\/users\/annargrs\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/annargrs\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-30T21:47:33Z","updated_at":"2020-11-29T23:05:19Z","closed_at":"2020-11-29T23:05:18Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/783","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/783","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/783.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/783.patch","merged_at":null},"body":"updated links to v1.3 of quail, fixed the description","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/783\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/783\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/782","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/782\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/782\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/782\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/782","id":733316463,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEzMTE2MTM0","number":782,"title":"Fix metric deletion when 
attribuets are missing","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-30T16:16:10Z","updated_at":"2020-10-30T16:47:53Z","closed_at":"2020-10-30T16:47:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/782","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/782","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/782.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/782.patch","merged_at":"2020-10-30T16:47:52Z"},"body":"When you call `del` on a metric we want to make sure that the arrow attributes are not already deleted.\r\nI just added `if hasattr(...)` to make sure it doesn't crash","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/782\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/782\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/781","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/781\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/781\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/781\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/781","id":733168609,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEyOTkyMzQw","number":781,"title":"Add XNLI train 
set","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-30T13:21:53Z","updated_at":"2020-11-09T18:22:50Z","closed_at":"2020-11-09T18:22:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/781","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/781","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/781.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/781.patch","merged_at":"2020-11-09T18:22:49Z"},"body":"I added the train set that was built using the translated MNLI.\r\nNow you can load the dataset specifying one language:\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nxnli_en = load_dataset(\"xnli\", \"en\")\r\nprint(xnli_en[\"train\"][0])\r\n# {'hypothesis': 'Product and geography are what make cream skimming work .', 'label': 1, 'premise': 'Conceptually cream skimming has two basic dimensions - product and geography .'}\r\nprint(xnli_en[\"test\"][0]) \r\n# {'hypothesis': 'I havent spoken to him again.', 'label': 2, 'premise': \"Well, I wasn't even thinking about that, but I was so frustrated, and, I ended up talking to him again.\"}\r\n```\r\n\r\nCc @sgugger ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/781\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":2,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/781\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/780","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/780\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/780\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/780\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/780","id":732738647,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEyNjM0MzI0","number":780,"title":"Add ASNQ 
dataset","user":{"login":"mkserge","id":2992022,"node_id":"MDQ6VXNlcjI5OTIwMjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2992022?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mkserge","html_url":"https:\/\/github.com\/mkserge","followers_url":"https:\/\/api.github.com\/users\/mkserge\/followers","following_url":"https:\/\/api.github.com\/users\/mkserge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mkserge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mkserge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mkserge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mkserge\/orgs","repos_url":"https:\/\/api.github.com\/users\/mkserge\/repos","events_url":"https:\/\/api.github.com\/users\/mkserge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mkserge\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-10-29T23:31:56Z","updated_at":"2020-11-10T09:26:23Z","closed_at":"2020-11-10T09:26:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/780","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/780","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/780.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/780.patch","merged_at":"2020-11-10T09:26:23Z"},"body":"This pull request adds the ASNQ dataset. It is a dataset for answer sentence selection derived from Google Natural Questions (NQ) dataset (Kwiatkowski et al. 2019). The dataset details can be found in the paper at https:\/\/arxiv.org\/abs\/1911.04118\r\n\r\nThe dataset is authored by Siddhant Garg, Thuy Vu and Alessandro Moschitti. 
\r\n\r\n_Please note that I have no affiliation with the authors._\r\n\r\nRepo: https:\/\/github.com\/alexa\/wqa_tanda\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/780\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/780\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/779","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/779\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/779\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/779\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/779","id":732514887,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEyNDQzMjY0","number":779,"title":"Feature\/fidelity metrics from emnlp2020 evaluating and characterizing human rationales","user":{"login":"rathoreanirudh","id":11327413,"node_id":"MDQ6VXNlcjExMzI3NDEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11327413?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rathoreanirudh","html_url":"https:\/\/github.com\/rathoreanirudh","followers_url":"https:\/\/api.github.com\/users\/rathoreanirudh\/followers","following_url":"https:\/\/api.github.com\/users\/rathoreanirudh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rathoreanirudh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rathoreanirudh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rathoreanirudh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rathoreanirudh\/orgs","repos_url":"https:\/\/api.github.com\/users\/rathoreanirudh\/repos","events_url":"https:\/\/api.github.com\/users\/rathoreanirudh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rathoreanirudh\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-10-29T17:31:14Z","updated_at":"2020-11-13T18:11:22Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/779","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/779","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/779.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/779.patch","merged_at":null},"body":"This metric computes fidelity (Yu et al. 2019, DeYoung et al. 2019) and normalized fidelity (Carton et al. 
2020).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/779\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/779\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/778","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/778\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/778\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/778\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/778","id":732449652,"node_id":"MDU6SXNzdWU3MzI0NDk2NTI=","number":778,"title":"Unexpected behavior when loading cached csv file?","user":{"login":"dcfidalgo","id":15979778,"node_id":"MDQ6VXNlcjE1OTc5Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15979778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dcfidalgo","html_url":"https:\/\/github.com\/dcfidalgo","followers_url":"https:\/\/api.github.com\/users\/dcfidalgo\/followers","following_url":"https:\/\/api.github.com\/users\/dcfidalgo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dcfidalgo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dcfidalgo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dcfidalgo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dcfidalgo\/orgs","repos_url":"https:\/\/api.github.com\/users\/dcfidalgo\/repos","events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-10-29T16:06:10Z","updated_at":"2020-10-29T21:21:27Z","closed_at":"2020-10-29T21:21:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I read a csv file from disk and forgot so specify the right delimiter. When i read the csv file again specifying the right delimiter it had no effect since it was using the cached dataset. I am not sure if this is unwanted behavior since i can always specify `download_mode=\"force_redownload\"`. But i think it would be nice if the information what `delimiter` or what `column_names` were used would influence the identifier of the cached dataset.\r\n\r\nSmall snippet to reproduce the behavior:\r\n```python\r\nimport datasets\r\n\r\nwith open(\"dummy_data.csv\", \"w\") as file:\r\n file.write(\"test,this;text\\n\")\r\n\r\nprint(datasets.load_dataset(\"csv\", data_files=\"dummy_data.csv\", split=\"train\").column_names)\r\n# [\"test\", \"this;text\"]\r\n\r\nprint(datasets.load_dataset(\"csv\", data_files=\"dummy_data.csv\", split=\"train\", delimiter=\";\").column_names)\r\n# still [\"test\", \"this;text\"]\r\n```\r\n\r\nBy the way, thanks a lot for this amazing library! 
:)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/778\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/778\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/777","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/777\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/777\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/777\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/777","id":732376648,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEyMzI2ODM2","number":777,"title":"Better error message for uninitialized metric","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-29T14:42:50Z","updated_at":"2020-10-29T15:18:26Z","closed_at":"2020-10-29T15:18:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/777","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/777","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/777.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/777.patch","merged_at":"2020-10-29T15:18:23Z"},"body":"When calling `metric.compute()` without having called `metric.add` or `metric.add_batch` at least once, the error was quite cryptic. 
I added a better error message\r\n\r\nFix #729 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/777\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/777\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/776","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/776\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/776\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/776\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/776","id":732343550,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEyMjk5NzQx","number":776,"title":"Allow custom split names in text dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-29T14:04:06Z","updated_at":"2020-10-30T13:46:45Z","closed_at":"2020-10-30T13:23:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/776","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/776","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/776.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/776.patch","merged_at":"2020-10-30T13:23:52Z"},"body":"The `text` dataset used to return only splits like train, test and validation. 
Other splits were ignored.\r\nNow any split name is allowed.\r\n\r\nI did the same for `json`, `pandas` and `csv`\r\n\r\nFix #735 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/776\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/776\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/775","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/775\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/775\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/775\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/775","id":732287504,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEyMjUyODI3","number":775,"title":"Properly delete metrics when a process is killed","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-29T12:52:07Z","updated_at":"2020-10-29T14:01:20Z","closed_at":"2020-10-29T14:01:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/775","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/775","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/775.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/775.patch","merged_at":"2020-10-29T14:01:19Z"},"body":"Tests are flaky when using metrics in distributed setup.\r\nThere is because of one test that make sure that using two possibly incompatible metric computation (same exp id) either works or raises the right error.\r\nHowever if the error is raised, all the processes of the metric are killed, and the open files (arrow + lock files) are not closed correctly. 
This causes PermissionError on Windows when deleting the temporary directory.\r\n\r\nTo fix that I added a `finally` clause in the function passed to multiprocess to properly close the files when the process exits.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/775\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/775\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/774","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/774\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/774\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/774\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/774","id":732265741,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEyMjM0NjA0","number":774,"title":"[ROUGE] Add description to Rouge metric","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-29T12:19:32Z","updated_at":"2020-10-29T17:55:50Z","closed_at":"2020-10-29T17:55:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/774","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/774","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/774.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/774.patch","merged_at":"2020-10-29T17:55:48Z"},"body":"Add information about case sensitivity to ROUGE.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/774\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/774\/timeline","performed_via_github_app":null} 
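The `finally`-based cleanup described for #775 above can be sketched as follows. This is a minimal, hypothetical illustration of the pattern (worker function, file names and the metric computation placeholder are assumptions, not the library's actual internals):

```python
import multiprocessing
import os
import tempfile


def metric_worker(exp_id: str, tmp_dir: str) -> None:
    # Placeholders for the arrow writer and the lock file the real metric keeps open.
    arrow_file = open(os.path.join(tmp_dir, f"{exp_id}.arrow"), "wb")
    lock_file = open(os.path.join(tmp_dir, f"{exp_id}.lock"), "w")
    try:
        pass  # the (possibly failing) metric computation would run here
    finally:
        # Close the handles even when the computation raises, so the temporary
        # directory can be removed afterwards without a PermissionError on Windows.
        arrow_file.close()
        lock_file.close()


if __name__ == "__main__":
    with tempfile.TemporaryDirectory() as tmp_dir:
        p = multiprocessing.Process(target=metric_worker, args=("exp_0", tmp_dir))
        p.start()
        p.join()
```

The point of the pattern is simply that the handles are released on every exit path of the worker, including the error path that kills the process.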
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/773","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/773\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/773\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/773\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/773","id":731684153,"node_id":"MDU6SXNzdWU3MzE2ODQxNTM=","number":773,"title":"Adding CC-100: Monolingual Datasets from Web Crawl Data","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false},"assignees":[{"login":"abhishekkrthakur","id":1183441,"node_id":"MDQ6VXNlcjExODM0NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1183441?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abhishekkrthakur","html_url":"https:\/\/github.com\/abhishekkrthakur","followers_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/followers","following_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/orgs","repos_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/repos","events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abhishekkrthakur\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2020-10-28T18:20:41Z","updated_at":"2022-01-26T13:22:54Z","closed_at":"2020-12-14T10:20:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** CC-100: Monolingual Datasets from Web Crawl Data\r\n- **Description:** https:\/\/twitter.com\/alex_conneau\/status\/1321507120848625665\r\n- **Paper:** https:\/\/arxiv.org\/abs\/1911.02116\r\n- **Data:** http:\/\/data.statmt.org\/cc-100\/\r\n- **Motivation:** A large scale multi-lingual language modeling dataset. 
Text is de-duplicated and filtered by how \"Wikipedia-like\" it is, hopefully helping avoid some of the worst parts of the common crawl.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/773\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/773\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/772","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/772\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/772\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/772\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/772","id":731612430,"node_id":"MDExOlB1bGxSZXF1ZXN0NTExNjg4ODMx","number":772,"title":"Fix metric with cache dir","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-28T16:43:13Z","updated_at":"2020-10-29T09:34:44Z","closed_at":"2020-10-29T09:34:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/772","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/772","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/772.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/772.patch","merged_at":"2020-10-29T09:34:42Z"},"body":"The cache_dir provided by the user was concatenated twice and therefore causing FileNotFound errors.\r\nThe tests didn't cover the case of providing `cache_dir=` for metrics because of a stupid issue (it was not using the right parameter).\r\n\r\nI remove the double concatenation and I fixed the tests\r\n\r\nFix #728 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/772\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/772\/timeline","performed_via_github_app":null} 
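To illustrate the double-concatenation bug fixed in #772 above, here is a hedged sketch (hypothetical path-building code, not the library's actual implementation) of how folding the user-provided `cache_dir` into the path twice yields a location that does not exist:

```python
import os

user_cache_dir = "/my/cache"   # what the user passes as cache_dir=
metric_name = "accuracy"

# Buggy: the user directory is concatenated a second time, producing
# "/my/cache/my/cache/accuracy", which is never created -> FileNotFoundError.
buggy_path = os.path.join(user_cache_dir, user_cache_dir.lstrip(os.sep), metric_name)

# Fixed: the user directory appears exactly once.
fixed_path = os.path.join(user_cache_dir, metric_name)

print(buggy_path)  # /my/cache/my/cache/accuracy
print(fixed_path)  # /my/cache/accuracy
```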
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/771","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/771\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/771\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/771\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/771","id":731482213,"node_id":"MDU6SXNzdWU3MzE0ODIyMTM=","number":771,"title":"Using `Dataset.map` with `n_proc>1` print multiple progress bars","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-28T14:13:27Z","updated_at":"2020-10-28T14:18:17Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When using `Dataset.map` with `n_proc > 1`, only one of the processes should print a progress bar (to make the output readable). 
Right now, `n_proc` progress bars are printed.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/771\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/771\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/770","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/770\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/770\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/770\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/770","id":731445222,"node_id":"MDExOlB1bGxSZXF1ZXN0NTExNTQ5MTg1","number":770,"title":"Fix custom builder caching","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-28T13:32:24Z","updated_at":"2020-10-29T09:36:03Z","closed_at":"2020-10-29T09:36:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/770","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/770","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/770.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/770.patch","merged_at":"2020-10-29T09:36:01Z"},"body":"The cache directory of a dataset didn't take into account additional parameters that the user could specify such as `features` or any parameter of the builder configuration kwargs (ex: `encoding` for the `text` dataset).\r\n\r\nTo fix that, the cache directory name now has a suffix that depends on all of them.\r\n\r\nFix #730\r\nFix #750 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/770\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/770\/timeline","performed_via_github_app":null} 
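The caching fix in #770 above keys the cache directory on the full builder configuration. A minimal sketch of that idea, assuming a hypothetical helper (this is not the actual `datasets` implementation, just an illustration of deriving a suffix from the config kwargs):

```python
import hashlib
import json


def config_cache_suffix(**builder_kwargs) -> str:
    """Derive a deterministic suffix from the builder configuration kwargs, so that
    e.g. text files loaded with different encodings land in different cache dirs."""
    payload = json.dumps(builder_kwargs, sort_keys=True, default=str)
    return hashlib.sha256(payload.encode("utf-8")).hexdigest()[:16]


# Different parameters -> different suffixes, so changing `encoding` (or `features`)
# no longer silently reuses the cache built with the old parameters.
print(config_cache_suffix(data_files=["my.txt"], encoding="utf-8"))
print(config_cache_suffix(data_files=["my.txt"], encoding="latin-1"))
```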
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/769","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/769\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/769\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/769\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/769","id":731257104,"node_id":"MDU6SXNzdWU3MzEyNTcxMDQ=","number":769,"title":"How to choose proper download_mode in function load_dataset?","user":{"login":"jzq2000","id":48550398,"node_id":"MDQ6VXNlcjQ4NTUwMzk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48550398?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jzq2000","html_url":"https:\/\/github.com\/jzq2000","followers_url":"https:\/\/api.github.com\/users\/jzq2000\/followers","following_url":"https:\/\/api.github.com\/users\/jzq2000\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jzq2000\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jzq2000\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jzq2000\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jzq2000\/orgs","repos_url":"https:\/\/api.github.com\/users\/jzq2000\/repos","events_url":"https:\/\/api.github.com\/users\/jzq2000\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jzq2000\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-10-28T09:16:19Z","updated_at":"2020-10-28T10:34:59Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I am a beginner to datasets and I try to use datasets to load my csv file.\r\nmy csv file looks like this\r\n\r\n``` \r\ntext,label\r\n\"Effective but too-tepid biopic\",3\r\n\"If you sometimes like to go to the movies to have fun , Wasabi is a good place to start .\",4\r\n\"Emerges as something rare , an issue movie that 's so honest and keenly observed that it does n't feel like one .\",5\r\n```\r\n\r\nFirst I try to use this command to load my csv file . \r\n\r\n``` python\r\ndataset=load_dataset('csv', data_files=['sst_test.csv'])\r\n```\r\n\r\nIt seems good, but when i try to overwrite the convert_options to convert 'label' columns from int64 to float32 like this.\r\n\r\n``` python\r\nimport pyarrow as pa\r\nfrom pyarrow import csv\r\nread_options = csv.ReadOptions(block_size=1024*1024)\r\nparse_options = csv.ParseOptions()\r\nconvert_options = csv.ConvertOptions(column_types={'text': pa.string(), 'label': pa.float32()})\r\ndataset = load_dataset('csv', data_files=['sst_test.csv'], read_options=read_options,\r\n parse_options=parse_options, convert_options=convert_options)\r\n```\r\n\r\nIt keeps the same:\r\n\r\n```shell\r\nDataset(features: {'text': Value(dtype='string', id=None), 'label': Value(dtype='int64', id=None)}, num_rows: 2210)\r\n```\r\n\r\nI think this issue is caused by the parameter \"download_mode\" Default to REUSE_DATASET_IF_EXISTS because after I delete the cache_dir, it seems right.\r\n\r\nIs it a bug? 
How to choose proper download_mode to avoid this issue?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/769\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/769\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/768","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/768\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/768\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/768\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/768","id":730908060,"node_id":"MDU6SXNzdWU3MzA5MDgwNjA=","number":768,"title":"Add a `lazy_map` method to `Dataset` and `DatasetDict`","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-27T22:33:03Z","updated_at":"2020-10-28T08:58:13Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The library is great, but it would be even more awesome with a `lazy_map` method implemented on `Dataset` and `DatasetDict`. This would apply a function on a give item but when the item is requested. Two use cases:\r\n\r\n1. load image on the fly\r\n2. 
apply a random function and get different outputs at each epoch (like data augmentation or randomly masking a part of a sentence for BERT-like objectives).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/768\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/768\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/767","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/767\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/767\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/767\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/767","id":730771610,"node_id":"MDU6SXNzdWU3MzA3NzE2MTA=","number":767,"title":"Add option for named splits when using ds.train_test_split","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-27T19:59:44Z","updated_at":"2020-11-10T14:05:21Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"### Feature Request \ud83d\ude80 \r\n\r\nCan we add a way to name your splits when using the `.train_test_split` function?\r\n\r\nIn almost every use case I've come across, I have a `train` and a `test` split in my `DatasetDict`, and I want to create a `validation` split. 
Therefore, its kinda useless to get a `test` split back from `train_test_split`, as it'll just overwrite my real `test` split that I intended to keep.\r\n\r\n### Workaround\r\n\r\nthis is my hack for dealin with this, for now :slightly_smiling_face:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\u200b\r\n\u200b\r\nds = load_dataset('imdb')\r\nds['train'], ds['validation'] = ds['train'].train_test_split(.1).values()\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/767\/reactions","total_count":4,"+1":4,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/767\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/766","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/766\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/766\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/766\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/766","id":730669596,"node_id":"MDU6SXNzdWU3MzA2Njk1OTY=","number":766,"title":"[GEM] add DART data-to-text generation dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-10-27T17:34:04Z","updated_at":"2020-12-03T13:37:18Z","closed_at":"2020-12-03T13:37:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** DART\r\n- **Description:** DART consists of 82,191 examples across different domains with each input being a semantic RDF triple set derived from data records in tables and the tree ontology of the schema, annotated with sentence descriptions that cover all facts in the triple set.\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2007.02871v1\r\n- **Data:** https:\/\/github.com\/Yale-LILY\/dart\r\n- **Motivation:** the dataset will likely be included in the GEM benchmark\r\n\r\nInstructions to add a new dataset can be found 
[here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/766\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/766\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/765","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/765\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/765\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/765\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/765","id":730668332,"node_id":"MDU6SXNzdWU3MzA2NjgzMzI=","number":765,"title":"[GEM] Add DART data-to-text generation dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-27T17:32:23Z","updated_at":"2020-10-27T17:34:21Z","closed_at":"2020-10-27T17:34:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** DART\r\n- **Description:** DART consists of 82,191 examples across different domains with each input being a semantic RDF triple set derived from data records in tables and the tree ontology of the schema, annotated with sentence descriptions that cover all facts in the triple set.\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2007.02871v1\r\n- **Data:** https:\/\/github.com\/Yale-LILY\/dart\r\n- **Motivation:** It will likely be included in the GEM generation evaluation benchmark\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/huggingface.co\/docs\/datasets\/share_dataset.html).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/765\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/765\/timeline","performed_via_github_app":null} 
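The `lazy_map` behaviour requested in #768 above can be approximated with a small wrapper that applies the function only when an item is accessed. The class below is a plain-Python sketch of that idea, not an API that exists in the library; `random_mask` is an invented example transform:

```python
import random


class LazyMapped:
    """Wrap a sequence of examples and apply `fn` at access time, so a random
    transform (e.g. masking for BERT-like objectives) differs on every epoch."""

    def __init__(self, examples, fn):
        self.examples = examples
        self.fn = fn

    def __len__(self):
        return len(self.examples)

    def __getitem__(self, idx):
        return self.fn(self.examples[idx])


def random_mask(example):
    words = example["text"].split()
    words[random.randrange(len(words))] = "[MASK]"
    return {"text": " ".join(words)}


ds = LazyMapped([{"text": "the cat sat on the mat"}], random_mask)
print(ds[0])  # a different word may be masked on each access
print(ds[0])
```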
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/764","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/764\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/764\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/764\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/764","id":730617828,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEwODkyMTk2","number":764,"title":"Adding Issue Template for Dataset Requests","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-27T16:37:08Z","updated_at":"2020-10-27T17:25:26Z","closed_at":"2020-10-27T17:25:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/764","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/764","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/764.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/764.patch","merged_at":"2020-10-27T17:25:25Z"},"body":"adding .github\/ISSUE_TEMPLATE\/add-dataset.md","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/764\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/764\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/763","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/763\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/763\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/763\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/763","id":730593631,"node_id":"MDExOlB1bGxSZXF1ZXN0NTEwODcyMDYx","number":763,"title":"Fixed errors in bertscore related to custom 
baseline","user":{"login":"juanjucm","id":36761132,"node_id":"MDQ6VXNlcjM2NzYxMTMy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36761132?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/juanjucm","html_url":"https:\/\/github.com\/juanjucm","followers_url":"https:\/\/api.github.com\/users\/juanjucm\/followers","following_url":"https:\/\/api.github.com\/users\/juanjucm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/juanjucm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/juanjucm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/juanjucm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/juanjucm\/orgs","repos_url":"https:\/\/api.github.com\/users\/juanjucm\/repos","events_url":"https:\/\/api.github.com\/users\/juanjucm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/juanjucm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-27T16:08:35Z","updated_at":"2020-10-28T17:59:25Z","closed_at":"2020-10-28T17:59:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/763","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/763","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/763.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/763.patch","merged_at":"2020-10-28T17:59:25Z"},"body":"[bertscore version 0.3.6 ](https:\/\/github.com\/Tiiiger\/bert_score) added support for custom baseline files. This update added extra argument `baseline_path` to BERTScorer class as well as some extra boolean parameters `use_custom_baseline` in functions like `get_hash(model, num_layers, idf, rescale_with_baseline, use_custom_baseline)`.\r\n\r\nThis PR fix those matching errors in bertscore metric implementation.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/763\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/763\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/762","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/762\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/762\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/762\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/762","id":730586972,"node_id":"MDU6SXNzdWU3MzA1ODY5NzI=","number":762,"title":"[GEM] Add Czech Restaurant data-to-text generation 
dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-27T16:00:47Z","updated_at":"2020-12-03T13:37:44Z","closed_at":"2020-12-03T13:37:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"- Paper: https:\/\/www.aclweb.org\/anthology\/W19-8670.pdf\r\n- Data: https:\/\/github.com\/UFAL-DSG\/cs_restaurant_dataset\r\n- The dataset will likely be part of the GEM benchmark","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/762\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/762\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/761","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/761\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/761\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/761\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/761","id":729898867,"node_id":"MDU6SXNzdWU3Mjk4OTg4Njc=","number":761,"title":"Downloaded datasets are not usable 
offline","user":{"login":"ghazi-f","id":25091538,"node_id":"MDQ6VXNlcjI1MDkxNTM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25091538?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghazi-f","html_url":"https:\/\/github.com\/ghazi-f","followers_url":"https:\/\/api.github.com\/users\/ghazi-f\/followers","following_url":"https:\/\/api.github.com\/users\/ghazi-f\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghazi-f\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghazi-f\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghazi-f\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghazi-f\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghazi-f\/repos","events_url":"https:\/\/api.github.com\/users\/ghazi-f\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghazi-f\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-26T20:54:46Z","updated_at":"2020-10-27T14:04:29Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I've been trying to use the IMDB dataset offline, but after downloading it and turning off the internet it still raises an error from the ```requests``` library trying to reach for the online dataset.\r\nIs this the intended behavior ?\r\n(Sorry, I wrote the the first version of this issue while still on nlp 0.3.0).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/761\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/761\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/760","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/760\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/760\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/760\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/760","id":729637917,"node_id":"MDU6SXNzdWU3Mjk2Mzc5MTc=","number":760,"title":"Add meta-data to the HANS 
dataset","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"},{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"assignees":[{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin
":false}],"milestone":null,"comments":0,"created_at":"2020-10-26T14:56:53Z","updated_at":"2020-12-03T13:38:34Z","closed_at":"2020-12-03T13:38:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The current version of the [HANS dataset](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/hans\/hans.py) is missing the additional information provided for each example, including the sentence parses, heuristic and subcase.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/760\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/760\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/759","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/759\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/759\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/759\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/759","id":729046916,"node_id":"MDU6SXNzdWU3MjkwNDY5MTY=","number":759,"title":"(Load dataset failure) ConnectionError: Couldn\u2019t reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.2\/datasets\/cnn_dailymail\/cnn_dailymail.py","user":{"login":"AI678","id":63541083,"node_id":"MDQ6VXNlcjYzNTQxMDgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/63541083?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AI678","html_url":"https:\/\/github.com\/AI678","followers_url":"https:\/\/api.github.com\/users\/AI678\/followers","following_url":"https:\/\/api.github.com\/users\/AI678\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AI678\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AI678\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AI678\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AI678\/orgs","repos_url":"https:\/\/api.github.com\/users\/AI678\/repos","events_url":"https:\/\/api.github.com\/users\/AI678\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AI678\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":15,"created_at":"2020-10-25T15:34:57Z","updated_at":"2021-08-04T18:10:09Z","closed_at":"2021-08-04T18:10:09Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hey, I want to load the cnn-dailymail dataset for fine-tune.\r\nI write the code like this\r\nfrom datasets import load_dataset\r\n\r\ntest_dataset = load_dataset(\u201ccnn_dailymail\u201d, \u201c3.0.0\u201d, split=\u201ctrain\u201d)\r\n\r\nAnd I got the following errors.\r\n\r\nTraceback (most recent call last):\r\nFile \u201ctest.py\u201d, line 7, in\r\ntest_dataset = load_dataset(\u201ccnn_dailymail\u201d, \u201c3.0.0\u201d, split=\u201ctest\u201d)\r\nFile \u201cC:\\Users\\666666\\AppData\\Local\\Programs\\Python\\Python38\\lib\\site-packages\\datasets\\load.py\u201d, line 589, in load_dataset\r\nmodule_path, hash = prepare_module(\r\nFile 
\u201cC:\\Users\\666666\\AppData\\Local\\Programs\\Python\\Python38\\lib\\site-packages\\datasets\\load.py\u201d, line 268, in prepare_module\r\nlocal_path = cached_path(file_path, download_config=download_config)\r\nFile \u201cC:\\Users\\666666\\AppData\\Local\\Programs\\Python\\Python38\\lib\\site-packages\\datasets\\utils\\file_utils.py\u201d, line 300, in cached_path\r\noutput_path = get_from_cache(\r\nFile \u201cC:\\Users\\666666\\AppData\\Local\\Programs\\Python\\Python38\\lib\\site-packages\\datasets\\utils\\file_utils.py\u201d, line 475, in get_from_cache\r\nraise ConnectionError(\u201cCouldn\u2019t reach {}\u201d.format(url))\r\nConnectionError: Couldn\u2019t reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.2\/datasets\/cnn_dailymail\/cnn_dailymail.py\r\n\r\nHow can I fix this ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/759\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/759\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/758","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/758\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/758\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/758\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/758","id":728638559,"node_id":"MDU6SXNzdWU3Mjg2Mzg1NTk=","number":758,"title":"Process 0 very slow when using num_procs with map to tokenizer","user":{"login":"ksjae","id":17930170,"node_id":"MDQ6VXNlcjE3OTMwMTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17930170?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ksjae","html_url":"https:\/\/github.com\/ksjae","followers_url":"https:\/\/api.github.com\/users\/ksjae\/followers","following_url":"https:\/\/api.github.com\/users\/ksjae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ksjae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ksjae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ksjae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ksjae\/orgs","repos_url":"https:\/\/api.github.com\/users\/ksjae\/repos","events_url":"https:\/\/api.github.com\/users\/ksjae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ksjae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-10-24T02:40:20Z","updated_at":"2020-10-28T03:59:46Z","closed_at":"2020-10-28T03:59:45Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"\"image\"\r\nThe code I am using is\r\n```\r\n\r\n dataset = load_dataset(\"text\", data_files=[file_path], split='train')\r\n dataset = dataset.map(lambda ex: tokenizer(ex[\"text\"], add_special_tokens=True,\r\n truncation=True, max_length=args.block_size), num_proc=8)\r\n dataset.set_format(type='torch', columns=['input_ids'])\r\n 
dataset.save_to_disk(file_path+'.arrow')\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/758\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/758\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/757","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/757\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/757\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/757\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/757","id":728241494,"node_id":"MDU6SXNzdWU3MjgyNDE0OTQ=","number":757,"title":"CUDA out of memory","user":{"login":"li1117heex","id":47059217,"node_id":"MDQ6VXNlcjQ3MDU5MjE3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47059217?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/li1117heex","html_url":"https:\/\/github.com\/li1117heex","followers_url":"https:\/\/api.github.com\/users\/li1117heex\/followers","following_url":"https:\/\/api.github.com\/users\/li1117heex\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/li1117heex\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/li1117heex\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/li1117heex\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/li1117heex\/orgs","repos_url":"https:\/\/api.github.com\/users\/li1117heex\/repos","events_url":"https:\/\/api.github.com\/users\/li1117heex\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/li1117heex\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-10-23T13:57:00Z","updated_at":"2020-12-23T14:06:29Z","closed_at":"2020-12-23T14:06:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In your dataset ,cuda run out of memory as long as the trainer begins:\r\nhowever, without changing any other element\/parameter,just switch dataset to `LineByLineTextDataset`,everything becames OK.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/757\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/757\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/756","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/756\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/756\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/756\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/756","id":728211373,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA4OTYwNTc3","number":756,"title":"Start community-provided dataset docs 
","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-23T13:17:41Z","updated_at":"2020-10-26T12:55:20Z","closed_at":"2020-10-26T12:55:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/756","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/756","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/756.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/756.patch","merged_at":"2020-10-26T12:55:19Z"},"body":"Continuation of #736 with clean fork.\r\n\r\n#### Old description\r\nThis is what I did to get the pseudo-labels updated. Not sure if it generalizes, but I figured I would write it down. 
It was pretty easy because all I had to do was make properly formatted directories and change URLs.\r\n\r\nIn slack @thomwolf called it a user-namespace dataset, but the docs call it community dataset.\r\nI think the first naming is clearer, but I didn't address that here.\r\n\r\nI didn't add metadata, will try that.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/756\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/756\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/755","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/755\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/755\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/755\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/755","id":728203821,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA4OTU0NDI2","number":755,"title":"Start community-provided dataset docs V2","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-23T13:07:30Z","updated_at":"2020-10-23T13:15:37Z","closed_at":"2020-10-23T13:15:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/755","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/755","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/755.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/755.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/755\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/755\/timeline","performed_via_github_app":null} 
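The two reports above about preprocessing speed (#758, tokenizing with `num_proc`) and offline reuse of downloaded data (#761) both revolve around the same workflow: tokenize once with `Dataset.map`, persist the result with `save_to_disk`, and reload it later without any network access. The snippet below is only a minimal sketch of that workflow, not a fix from the maintainers; the model name, file path, and `max_length` value are illustrative assumptions rather than values taken from the reports.

```python
# Minimal sketch: tokenize a local text file once, save it, and reload it offline.
# "gpt2", "corpus.txt", and max_length=512 are assumed placeholders, not values
# taken from the issues above.
from datasets import load_dataset, load_from_disk
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")

# Load a plain-text file as a dataset and tokenize it with several worker processes.
dataset = load_dataset("text", data_files=["corpus.txt"], split="train")
dataset = dataset.map(
    lambda ex: tokenizer(ex["text"], truncation=True, max_length=512),
    num_proc=4,  # split the map work across 4 processes
)
dataset.set_format(type="torch", columns=["input_ids"])
dataset.save_to_disk("corpus_tokenized")

# Later, with no internet connection, the saved Arrow files can be reloaded directly.
offline_dataset = load_from_disk("corpus_tokenized")
```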
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/754","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/754\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/754\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/754\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/754","id":727863105,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA4NjczNzM2","number":754,"title":"Use full released xsum dataset","user":{"login":"jbragg","id":2238344,"node_id":"MDQ6VXNlcjIyMzgzNDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2238344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jbragg","html_url":"https:\/\/github.com\/jbragg","followers_url":"https:\/\/api.github.com\/users\/jbragg\/followers","following_url":"https:\/\/api.github.com\/users\/jbragg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jbragg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jbragg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jbragg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jbragg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jbragg\/repos","events_url":"https:\/\/api.github.com\/users\/jbragg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jbragg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-10-23T03:29:49Z","updated_at":"2021-01-01T03:11:56Z","closed_at":"2020-10-26T12:56:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/754","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/754","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/754.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/754.patch","merged_at":"2020-10-26T12:56:58Z"},"body":"#672 Fix xsum to expand coverage and include IDs\r\nCode based on parser from older version of `datasets\/xsum\/xsum.py`\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/754\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/754\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/753","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/753\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/753\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/753\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/753","id":727434935,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA4MzI4ODM0","number":753,"title":"Fix doc links to 
viewer","user":{"login":"Pierrci","id":5020707,"node_id":"MDQ6VXNlcjUwMjA3MDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5020707?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Pierrci","html_url":"https:\/\/github.com\/Pierrci","followers_url":"https:\/\/api.github.com\/users\/Pierrci\/followers","following_url":"https:\/\/api.github.com\/users\/Pierrci\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Pierrci\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Pierrci\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Pierrci\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Pierrci\/orgs","repos_url":"https:\/\/api.github.com\/users\/Pierrci\/repos","events_url":"https:\/\/api.github.com\/users\/Pierrci\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Pierrci\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-22T14:20:16Z","updated_at":"2020-10-23T08:42:11Z","closed_at":"2020-10-23T08:42:11Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/753","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/753","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/753.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/753.patch","merged_at":"2020-10-23T08:42:11Z"},"body":"It seems #733 forgot some links in the doc :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/753\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/753\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/752","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/752\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/752\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/752\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/752","id":726917801,"node_id":"MDU6SXNzdWU3MjY5MTc4MDE=","number":752,"title":"Clicking on a metric in the search page points to datasets page giving \"Missing dataset\" 
warning","user":{"login":"ogabrielluiz","id":24829397,"node_id":"MDQ6VXNlcjI0ODI5Mzk3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24829397?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ogabrielluiz","html_url":"https:\/\/github.com\/ogabrielluiz","followers_url":"https:\/\/api.github.com\/users\/ogabrielluiz\/followers","following_url":"https:\/\/api.github.com\/users\/ogabrielluiz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ogabrielluiz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ogabrielluiz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ogabrielluiz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ogabrielluiz\/orgs","repos_url":"https:\/\/api.github.com\/users\/ogabrielluiz\/repos","events_url":"https:\/\/api.github.com\/users\/ogabrielluiz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ogabrielluiz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-10-21T22:56:23Z","updated_at":"2020-10-22T16:19:42Z","closed_at":"2020-10-22T16:19:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi! Sorry if this isn't the right place to talk about the website, I just didn't exactly where to write this.\r\n\r\nSearching a metric in https:\/\/huggingface.co\/metrics gives the right results but clicking on a metric (E.g ROUGE) points to https:\/\/huggingface.co\/datasets\/rouge. Clicking on a metric without searching points to the right page.\r\n\r\nThanks for all the great work!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/752\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/752\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/751","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/751\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/751\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/751\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/751","id":726820191,"node_id":"MDU6SXNzdWU3MjY4MjAxOTE=","number":751,"title":"Error loading ms_marco v2.1 using 
load_dataset()","user":{"login":"JainSahit","id":30478979,"node_id":"MDQ6VXNlcjMwNDc4OTc5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30478979?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JainSahit","html_url":"https:\/\/github.com\/JainSahit","followers_url":"https:\/\/api.github.com\/users\/JainSahit\/followers","following_url":"https:\/\/api.github.com\/users\/JainSahit\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JainSahit\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JainSahit\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JainSahit\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JainSahit\/orgs","repos_url":"https:\/\/api.github.com\/users\/JainSahit\/repos","events_url":"https:\/\/api.github.com\/users\/JainSahit\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JainSahit\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-10-21T19:54:43Z","updated_at":"2020-11-05T01:31:57Z","closed_at":"2020-11-05T01:31:57Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Code:\r\n`dataset = load_dataset('ms_marco', 'v2.1')`\r\n\r\nError:\r\n```\r\n`---------------------------------------------------------------------------\r\nJSONDecodeError Traceback (most recent call last)\r\n in ()\r\n 9 \r\n 10 # Downloading and loading a dataset\r\n---> 11 dataset = load_dataset('ms_marco', 'v2.1')\r\n\r\n10 frames\r\n\/usr\/lib\/python3.6\/json\/decoder.py in raw_decode(self, s, idx)\r\n 353 \"\"\"\r\n 354 try:\r\n--> 355 obj, end = self.scan_once(s, idx)\r\n 356 except StopIteration as err:\r\n 357 raise JSONDecodeError(\"Expecting value\", s, err.value) from None\r\n\r\nJSONDecodeError: Unterminated string starting at: line 1 column 388988661 (char 388988660)\r\n`\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/751\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/751\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/750","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/750\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/750\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/750\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/750","id":726589446,"node_id":"MDU6SXNzdWU3MjY1ODk0NDY=","number":750,"title":"load_dataset doesn't include `features` in its 
hash","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-21T15:16:41Z","updated_at":"2020-10-29T09:36:01Z","closed_at":"2020-10-29T09:36:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It looks like the function `load_dataset` does not include what's passed in the `features` argument when creating a hash for a given dataset. As a result, if a user includes new features from an already downloaded dataset, those are ignored.\r\n\r\nExample: some models on the hub have a different ordering for the labels than what `datasets` uses for MNLI so I'd like to do something along the lines of:\r\n```\r\ndataset = load_dataset(\"glue\", \"mnli\")\r\nfeatures = dataset[\"train\"].features\r\nfeatures[\"label\"] = ClassLabel(names = ['entailment', 'contradiction', 'neutral']) # new label order\r\ndataset = load_dataset(\"glue\", \"mnli\", features=features)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/750\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/750\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/749","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/749\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/749\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/749\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/749","id":726366062,"node_id":"MDU6SXNzdWU3MjYzNjYwNjI=","number":749,"title":"[XGLUE] Adding new 
dataset","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comment
s":13,"created_at":"2020-10-21T10:51:36Z","updated_at":"2021-01-06T10:02:56Z","closed_at":"2021-01-06T10:02:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"XGLUE is a multilingual GLUE like dataset propesed in this [paper](https:\/\/arxiv.org\/pdf\/2004.01401.pdf).\r\n\r\nI'm planning on adding the dataset to the library myself in a couple of weeks.\r\nAlso tagging @JetRunner @qiweizhen in case I need some guidance ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/749\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/749\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/748","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/748\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/748\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/748\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/748","id":726196589,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA3MzAyNjE3","number":748,"title":"New version of CompGuessWhat?! with refined annotations","user":{"login":"aleSuglia","id":1479733,"node_id":"MDQ6VXNlcjE0Nzk3MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1479733?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aleSuglia","html_url":"https:\/\/github.com\/aleSuglia","followers_url":"https:\/\/api.github.com\/users\/aleSuglia\/followers","following_url":"https:\/\/api.github.com\/users\/aleSuglia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aleSuglia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aleSuglia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aleSuglia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aleSuglia\/orgs","repos_url":"https:\/\/api.github.com\/users\/aleSuglia\/repos","events_url":"https:\/\/api.github.com\/users\/aleSuglia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aleSuglia\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-21T06:55:41Z","updated_at":"2020-10-21T08:52:42Z","closed_at":"2020-10-21T08:46:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/748","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/748","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/748.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/748.patch","merged_at":"2020-10-21T08:46:19Z"},"body":"This pull request introduces a few fixes to the annotations for VisualGenome in the CompGuessWhat?! 
original split.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/748\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/748\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/747","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/747\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/747\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/747\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/747","id":725884704,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA3MDQ3MDE4","number":747,"title":"Add Quail question answering dataset","user":{"login":"sai-prasanna","id":3595526,"node_id":"MDQ6VXNlcjM1OTU1MjY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3595526?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sai-prasanna","html_url":"https:\/\/github.com\/sai-prasanna","followers_url":"https:\/\/api.github.com\/users\/sai-prasanna\/followers","following_url":"https:\/\/api.github.com\/users\/sai-prasanna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sai-prasanna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sai-prasanna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sai-prasanna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sai-prasanna\/orgs","repos_url":"https:\/\/api.github.com\/users\/sai-prasanna\/repos","events_url":"https:\/\/api.github.com\/users\/sai-prasanna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sai-prasanna\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-20T19:33:14Z","updated_at":"2020-10-21T08:35:15Z","closed_at":"2020-10-21T08:35:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/747","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/747","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/747.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/747.patch","merged_at":"2020-10-21T08:35:15Z"},"body":"QuAIL is a multi-domain RC dataset featuring news, blogs, fiction and user stories. Each domain is represented by 200 texts, which gives us a 4-way data split. The texts are 300-350 word excerpts from CC-licensed texts that were hand-picked so as to make sense to human readers without larger context. Domain diversity mitigates the issue of possible overlap between training and test data of large pre-trained models, which the current SOTA systems are based on. 
For instance, BERT is trained on Wikipedia + BookCorpus, and was tested on Wikipedia-based SQuAD (Devlin, Chang, Lee, & Toutanova, 2019).\r\n\r\nhttps:\/\/text-machine-lab.github.io\/blog\/2020\/quail\/ @annargrs","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/747\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/747\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/746","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/746\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/746\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/746\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/746","id":725627235,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA2ODMzNDMw","number":746,"title":"dataset(ngt): add ngt dataset initial loading script","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-20T14:04:58Z","updated_at":"2021-03-23T06:19:38Z","closed_at":"2021-03-23T06:19:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/746","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/746","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/746.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/746.patch","merged_at":null},"body":"Currently only making the paths to the annotation ELAN (eaf) file and videos available.\r\nThis is the first accessible way to download this dataset, which is not manual file-by-file.\r\n\r\nOnly downloading the necessary files, the annotation files are very small, 20MB for all of them, but the video files are large, 100GB in total, saved in `mpg` format. 
\r\nI do not intend to actually store these as an uncompressed array of frames, because it will be huge.\r\n\r\nFuture updates may add pose estimation files for all videos, making it easier to work with this data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/746\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/746\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/745","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/745\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/745\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/745\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/745","id":725589352,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA2ODAxMTI0","number":745,"title":"Fix emotion description","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-20T13:28:39Z","updated_at":"2021-04-22T14:47:31Z","closed_at":"2020-10-21T08:38:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/745","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/745","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/745.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/745.patch","merged_at":"2020-10-21T08:38:27Z"},"body":"Fixes the description of the emotion dataset to reflect the class names observed in the data, not the ones described in the paper.\r\n\r\nI also took the liberty to make use of `ClassLabel` for the emotion labels.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/745\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/745\/timeline","performed_via_github_app":null} 
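Issue #750 (passing `features` to `load_dataset`) and PR #745 (switching the emotion labels to `ClassLabel`) both rely on the `ClassLabel` feature type. The following is a minimal, self-contained sketch of how a `ClassLabel` maps between label strings and integer ids; the label names used here are assumed for illustration only.

```python
# Minimal sketch of a ClassLabel feature, as referenced in #745 and #750.
# The label names below are assumed for illustration only.
from datasets import ClassLabel, Features, Value

features = Features(
    {
        "text": Value("string"),
        "label": ClassLabel(names=["sadness", "joy", "love", "anger", "fear", "surprise"]),
    }
)

# ClassLabel converts between the string names and their integer ids.
print(features["label"].str2int("joy"))  # 1
print(features["label"].int2str(4))      # "fear"
```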
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/744","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/744\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/744\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/744\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/744","id":724918448,"node_id":"MDU6SXNzdWU3MjQ5MTg0NDg=","number":744,"title":"Dataset Explorer Doesn't Work for squad_es and squad_it","user":{"login":"gaotongxiao","id":22607038,"node_id":"MDQ6VXNlcjIyNjA3MDM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22607038?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gaotongxiao","html_url":"https:\/\/github.com\/gaotongxiao","followers_url":"https:\/\/api.github.com\/users\/gaotongxiao\/followers","following_url":"https:\/\/api.github.com\/users\/gaotongxiao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gaotongxiao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gaotongxiao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gaotongxiao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gaotongxiao\/orgs","repos_url":"https:\/\/api.github.com\/users\/gaotongxiao\/repos","events_url":"https:\/\/api.github.com\/users\/gaotongxiao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gaotongxiao\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-19T19:34:12Z","updated_at":"2020-10-26T16:36:17Z","closed_at":"2020-10-26T16:36:17Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"https:\/\/huggingface.co\/nlp\/viewer\/?dataset=squad_es\r\nhttps:\/\/huggingface.co\/nlp\/viewer\/?dataset=squad_it\r\n\r\nBoth pages show \"OSError: [Errno 28] No space left on device\".","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/744\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/744\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/743","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/743\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/743\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/743\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/743","id":724703980,"node_id":"MDU6SXNzdWU3MjQ3MDM5ODA=","number":743,"title":"load_dataset for CSV files not 
working","user":{"login":"iliemihai","id":2815308,"node_id":"MDQ6VXNlcjI4MTUzMDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2815308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iliemihai","html_url":"https:\/\/github.com\/iliemihai","followers_url":"https:\/\/api.github.com\/users\/iliemihai\/followers","following_url":"https:\/\/api.github.com\/users\/iliemihai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iliemihai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iliemihai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iliemihai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iliemihai\/orgs","repos_url":"https:\/\/api.github.com\/users\/iliemihai\/repos","events_url":"https:\/\/api.github.com\/users\/iliemihai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iliemihai\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":21,"created_at":"2020-10-19T14:53:51Z","updated_at":"2021-09-09T18:26:46Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Similar to #622, I've noticed there is a problem when trying to load a CSV file with datasets.\r\n\r\n`\r\nfrom datasets import load_dataset\r\n`\r\n`\r\ndataset = load_dataset(\"csv\", data_files=[\".\/sample_data.csv\"], delimiter=\"\\t\", column_names=[\"title\", \"text\"], script_version=\"master\")\r\n`\r\n\r\nDisplayed error:\r\n`\r\n...\r\nArrowInvalid: CSV parse error: Expected 2 columns, got 1\r\n`\r\n\r\nI should mention that when I've tried to read data from `https:\/\/github.com\/lhoestq\/transformers\/tree\/custom-dataset-in-rag-retriever\/examples\/rag\/test_data\/my_knowledge_dataset.csv` it worked without a problem. 
I've read that there might be some problems with \/r character, so I've removed them from the custom dataset, but the problem still remains.\r\n\r\nI've added a colab reproducing the bug, but unfortunately I cannot provide the dataset.\r\nhttps:\/\/colab.research.google.com\/drive\/1Qzu7sC-frZVeniiWOwzoCe_UHZsrlxu8?usp=sharing\r\n\r\nAre there any work around for it ?\r\nThank you","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/743\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/743\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/742","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/742\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/742\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/742\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/742","id":724509974,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA1ODgzNjI3","number":742,"title":"Add OCNLI, a new CLUE dataset","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-19T11:06:33Z","updated_at":"2020-10-22T16:19:49Z","closed_at":"2020-10-22T16:19:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/742","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/742","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/742.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/742.patch","merged_at":"2020-10-22T16:19:47Z"},"body":"OCNLI stands for Original Chinese Natural Language Inference. It is a corpus for\r\n Chinese Natural Language Inference, collected following closely the procedures of MNLI,\r\n but with enhanced strategies aiming for more challenging inference pairs. 
We want to\r\n emphasize we did not use human\/machine translation in creating the dataset, and thus\r\n our Chinese texts are original and not translated.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/742\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/742\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/741","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/741\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/741\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/741\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/741","id":723924275,"node_id":"MDU6SXNzdWU3MjM5MjQyNzU=","number":741,"title":"Creating dataset consumes too much memory","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":19,"created_at":"2020-10-18T06:07:06Z","updated_at":"2021-03-30T09:47:08Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Moving this issue from https:\/\/github.com\/huggingface\/datasets\/pull\/722 here, because it seems like a general issue.\r\n\r\nGiven the following dataset example, where each example saves a sequence of 260x210x3 images (max length 400):\r\n```python\r\n def _generate_examples(self, base_path, split):\r\n \"\"\" Yields examples. 
\"\"\"\r\n\r\n filepath = os.path.join(base_path, \"annotations\", \"manual\", \"PHOENIX-2014-T.\" + split + \".corpus.csv\")\r\n images_path = os.path.join(base_path, \"features\", \"fullFrame-210x260px\", split)\r\n\r\n with open(filepath, \"r\", encoding=\"utf-8\") as f:\r\n data = csv.DictReader(f, delimiter=\"|\", quoting=csv.QUOTE_NONE)\r\n for row in data:\r\n frames_path = os.path.join(images_path, row[\"video\"])[:-7]\r\n np_frames = []\r\n for frame_name in os.listdir(frames_path):\r\n frame_path = os.path.join(frames_path, frame_name)\r\n im = Image.open(frame_path)\r\n np_frames.append(np.asarray(im))\r\n im.close()\r\n\r\n yield row[\"name\"], {\"video\": np_frames}\r\n```\r\n\r\nThe dataset creation process goes out of memory on a machine with 500GB RAM.\r\nI was under the impression that the \"generator\" here is exactly for that, to avoid memory constraints.\r\n\r\n\r\nHowever, even if you want the entire dataset in memory, it would be in the worst case\r\n`260x210x3 x 400 max length x 7000 samples` in bytes (uint8) = 458.64 gigabytes\r\nSo I'm not sure why it's taking more than 500GB.\r\n\r\nAnd the dataset creation fails after 170 examples on a machine with 120gb RAM, and after 672 examples on a machine with 500GB RAM.\r\n\r\n\r\n---\r\n\r\n## Info that might help:\r\nIterating over examples is extremely slow.\r\n![image](https:\/\/user-images.githubusercontent.com\/5757359\/96359590-3c666780-111d-11eb-9347-1f833ad982a9.png)\r\nIf I perform this iteration in my own, custom loop (Without saving to file), it runs at 8-9 examples\/sec\r\n\r\nAnd you can see at this state it is using 94% of the memory:\r\n![image](https:\/\/user-images.githubusercontent.com\/5757359\/96359606-7afc2200-111d-11eb-8c11-0afbdba1a6a3.png)\r\n\r\nAnd it is only using one CPU core, which is probably why it's so slow:\r\n![image](https:\/\/user-images.githubusercontent.com\/5757359\/96359630-a3841c00-111d-11eb-9ba0-7fd3cdf51d26.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/741\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/741\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/740","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/740\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/740\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/740\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/740","id":723047958,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA0NzAyNTc0","number":740,"title":"Fix TREC 
urls","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-16T09:11:28Z","updated_at":"2020-10-19T08:54:37Z","closed_at":"2020-10-19T08:54:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/740","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/740","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/740.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/740.patch","merged_at":"2020-10-19T08:54:35Z"},"body":"The old TREC urls are now redirections.\r\nI updated the urls to the new ones, since we don't support redirections for downloads.\r\n\r\nFix #737 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/740\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/740\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/739","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/739\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/739\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/739\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/739","id":723044066,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA0Njk5NTY3","number":739,"title":"Add wiki dpr multiset 
embeddings","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-10-16T09:05:49Z","updated_at":"2020-11-26T14:02:50Z","closed_at":"2020-11-26T14:02:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/739","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/739","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/739.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/739.patch","merged_at":"2020-11-26T14:02:49Z"},"body":"There are two DPR encoders, one trained on Natural Questions and one trained on a multiset\/hybrid dataset.\r\nPreviously only the embeddings from the encoder trained on NQ were available. 
I'm adding the ones from the encoder trained on the multiset\/hybrid dataset.\r\nIn the configuration you can now specify `embeddings_name=\"nq\"` or `embeddings_name=\"multiset\"`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/739\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/739\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/738","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/738\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/738\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/738\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/738","id":723033923,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA0NjkxNjM4","number":738,"title":"Replace seqeval code with original classification_report for simplicity","user":{"login":"Hironsan","id":6737785,"node_id":"MDQ6VXNlcjY3Mzc3ODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6737785?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Hironsan","html_url":"https:\/\/github.com\/Hironsan","followers_url":"https:\/\/api.github.com\/users\/Hironsan\/followers","following_url":"https:\/\/api.github.com\/users\/Hironsan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Hironsan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Hironsan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Hironsan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Hironsan\/orgs","repos_url":"https:\/\/api.github.com\/users\/Hironsan\/repos","events_url":"https:\/\/api.github.com\/users\/Hironsan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Hironsan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-10-16T08:51:45Z","updated_at":"2021-01-21T16:07:15Z","closed_at":"2020-10-19T10:31:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/738","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/738","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/738.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/738.patch","merged_at":"2020-10-19T10:31:11Z"},"body":"Recently, the original seqeval has enabled us to get per type scores and overall scores as a dictionary.\r\n\r\nThis PR replaces the current code with the original function(`classification_report`) to simplify it.\r\n\r\nAlso, the original code has been updated to fix #352.\r\n- Related issue: https:\/\/github.com\/chakki-works\/seqeval\/pull\/38\r\n\r\n\r\n```python\r\nfrom datasets import load_metric\r\nmetric = load_metric(\"seqeval\")\r\ny_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]\r\ny_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]\r\nmetric.compute(predictions=y_pred, references=y_true)\r\n# Output: {'MISC': {'precision': 0.0, 'recall': 0.0, 'f1': 0, 'number': 1}, 'PER': 
{'precision': 1.0, 'recall': 1.0, 'f1': 1.0, 'number': 1}, 'overall_precision': 0.5, 'overall_recall': 0.5, 'overall_f1': 0.5, 'overall_accuracy': 0.8}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/738\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/738\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/737","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/737\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/737\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/737\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/737","id":722463923,"node_id":"MDU6SXNzdWU3MjI0NjM5MjM=","number":737,"title":"Trec Dataset Connection Error","user":{"login":"aychang95","id":10554495,"node_id":"MDQ6VXNlcjEwNTU0NDk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10554495?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aychang95","html_url":"https:\/\/github.com\/aychang95","followers_url":"https:\/\/api.github.com\/users\/aychang95\/followers","following_url":"https:\/\/api.github.com\/users\/aychang95\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aychang95\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aychang95\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aychang95\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aychang95\/orgs","repos_url":"https:\/\/api.github.com\/users\/aychang95\/repos","events_url":"https:\/\/api.github.com\/users\/aychang95\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aychang95\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-15T15:57:53Z","updated_at":"2020-10-19T08:54:36Z","closed_at":"2020-10-19T08:54:36Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Datasets Version:**\r\n1.1.2\r\n\r\n**Python Version:**\r\n3.6\/3.7\r\n\r\n\r\n**Code:**\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset(\"trec\")\r\n```\r\n\r\n**Expected behavior:**\r\nDownload Trec dataset and load Dataset object\r\n\r\n**Current Behavior:**\r\nGet a connection error saying it couldn't reach http:\/\/cogcomp.org\/Data\/QA\/QC\/train_5500.label (but the link doesn't seem broken)\r\n\r\n
\r\n Error Logs<\/summary>\r\n \r\n\r\nUsing custom data configuration default\r\nDownloading and preparing dataset trec\/default (download: 350.79 KiB, generated: 403.39 KiB, post-processed: Unknown size, total: 754.18 KiB) to \/root\/.cache\/huggingface\/datasets\/trec\/default\/1.1.0\/ca4248481ad244f235f4cf277186cad2ee8769f975119a2bbfc41b8932b88bd7...\r\n---------------------------------------------------------------------------\r\nConnectionError Traceback (most recent call last)\r\n in ()\r\n----> 1 load_dataset(\"trec\")\r\n\r\n10 frames\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag)\r\n 473 elif response is not None and response.status_code == 404:\r\n 474 raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\n--> 475 raise ConnectionError(\"Couldn't reach {}\".format(url))\r\n 476 \r\n 477 # Try a second time\r\n\r\nConnectionError: Couldn't reach http:\/\/cogcomp.org\/Data\/QA\/QC\/train_5500.label\r\n\r\n<\/details>","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/737\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/737\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/736","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/736\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/736\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/736\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/736","id":722348191,"node_id":"MDExOlB1bGxSZXF1ZXN0NTA0MTE0MjMy","number":736,"title":"Start community-provided dataset 
docs","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-10-15T13:41:39Z","updated_at":"2020-10-23T13:15:28Z","closed_at":"2020-10-23T13:15:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/736","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/736","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/736.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/736.patch","merged_at":null},"body":"This is one I did to get the pseudo-labels updated. Not sure if it generalizes, but I figured I would write it down. It was pretty easy because all I had to do was make properly formatted directories and change URLs.\r\n\r\n+ In slack @thomwolf called it a `user-namespace` dataset, but the docs call it `community dataset`.\r\nI think the first naming is clearer, but I didn't address that here.\r\n\r\n\r\n+ I didn't add metadata, will try that.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/736\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/736\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/735","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/735\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/735\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/735\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/735","id":722225270,"node_id":"MDU6SXNzdWU3MjIyMjUyNzA=","number":735,"title":"Throw error when an unexpected key is used in 
data_files","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-15T10:55:27Z","updated_at":"2020-10-30T13:23:52Z","closed_at":"2020-10-30T13:23:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I have found that only \"train\", \"validation\" and \"test\" are valid keys in the `data_files` argument. When you use any other ones, those attached files are silently ignored - leading to unexpected behaviour for the users.\r\n\r\nSo the following, unintuitively, returns only one key (namely `train`).\r\n\r\n```python\r\ndatasets = load_dataset(\"text\", data_files={\"train\": train_f, \"valid\": valid_f})\r\nprint(datasets.keys())\r\n# dict_keys(['train'])\r\n```\r\n\r\nwhereas using `validation` instead, does return the expected result:\r\n\r\n```python\r\ndatasets = load_dataset(\"text\", data_files={\"train\": train_f, \"validation\": valid_f})\r\nprint(datasets.keys())\r\n# dict_keys(['train', 'validation'])\r\n```\r\n\r\nI would like to see more freedom in which keys one can use, but if that is not possible at least an error should be thrown when using an unexpected key.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/735\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/735\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/734","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/734\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/734\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/734\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/734","id":721767848,"node_id":"MDExOlB1bGxSZXF1ZXN0NTAzNjMwMDcz","number":734,"title":"Fix GLUE metric 
description","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-14T20:44:14Z","updated_at":"2020-10-15T09:27:43Z","closed_at":"2020-10-15T09:27:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/734","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/734","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/734.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/734.patch","merged_at":"2020-10-15T09:27:42Z"},"body":"Small typo: the description says translation instead of prediction.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/734\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/734\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/733","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/733\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/733\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/733\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/733","id":721366744,"node_id":"MDExOlB1bGxSZXF1ZXN0NTAzMjk2NDQw","number":733,"title":"Update link to dataset 
viewer","user":{"login":"negedng","id":12969168,"node_id":"MDQ6VXNlcjEyOTY5MTY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12969168?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/negedng","html_url":"https:\/\/github.com\/negedng","followers_url":"https:\/\/api.github.com\/users\/negedng\/followers","following_url":"https:\/\/api.github.com\/users\/negedng\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/negedng\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/negedng\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/negedng\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/negedng\/orgs","repos_url":"https:\/\/api.github.com\/users\/negedng\/repos","events_url":"https:\/\/api.github.com\/users\/negedng\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/negedng\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-14T11:13:23Z","updated_at":"2020-10-14T14:07:31Z","closed_at":"2020-10-14T14:07:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/733","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/733","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/733.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/733.patch","merged_at":"2020-10-14T14:07:31Z"},"body":"Change 404 error links in quick tour to working ones","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/733\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/733\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/732","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/732\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/732\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/732\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/732","id":721359448,"node_id":"MDExOlB1bGxSZXF1ZXN0NTAzMjkwMjEy","number":732,"title":"dataset(wlasl): initial loading 
script","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-10-14T11:01:42Z","updated_at":"2021-03-23T06:19:43Z","closed_at":"2021-03-23T06:19:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/732","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/732","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/732.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/732.patch","merged_at":null},"body":"takes like 9-10 hours to download all of the videos for the dataset, but it does finish :) ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/732\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/732\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/731","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/731\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/731\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/731\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/731","id":721142985,"node_id":"MDExOlB1bGxSZXF1ZXN0NTAzMTExNzc4","number":731,"title":"dataset(aslg_pc12): initial loading 
script","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-10-14T05:14:37Z","updated_at":"2020-10-28T15:27:06Z","closed_at":"2020-10-28T15:27:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/731","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/731","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/731.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/731.patch","merged_at":"2020-10-28T15:27:06Z"},"body":"This contains the only current public part of this corpus.\r\n\r\nThe rest of the corpus is not yet been made public, but this sample is still being used by researchers.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/731\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/731\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/730","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/730\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/730\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/730\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/730","id":721073812,"node_id":"MDU6SXNzdWU3MjEwNzM4MTI=","number":730,"title":"Possible caching 
bug","user":{"login":"ArneBinder","id":3375489,"node_id":"MDQ6VXNlcjMzNzU0ODk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3375489?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ArneBinder","html_url":"https:\/\/github.com\/ArneBinder","followers_url":"https:\/\/api.github.com\/users\/ArneBinder\/followers","following_url":"https:\/\/api.github.com\/users\/ArneBinder\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ArneBinder\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ArneBinder\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ArneBinder\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ArneBinder\/orgs","repos_url":"https:\/\/api.github.com\/users\/ArneBinder\/repos","events_url":"https:\/\/api.github.com\/users\/ArneBinder\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ArneBinder\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-10-14T02:02:34Z","updated_at":"2021-11-28T14:28:57Z","closed_at":"2020-10-29T09:36:01Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The following code with `test1.txt` containing just \"\ud83e\udd17\ud83e\udd17\ud83e\udd17\":\r\n```\r\ndataset = datasets.load_dataset('text', data_files=['test1.txt'], split=\"train\", encoding=\"latin_1\")\r\nprint(dataset[0])\r\ndataset = datasets.load_dataset('text', data_files=['test1.txt'], split=\"train\", encoding=\"utf-8\")\r\nprint(dataset[0])\r\n``` \r\nproduces this output:\r\n```\r\nDownloading and preparing dataset text\/default-15600e4d83254059 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/arne\/.cache\/huggingface\/datasets\/text\/default-15600e4d83254059\/0.0.0\/52cefbb2b82b015d4253f1aeb1e6ee5591124a6491e834acfe1751f765925155...\r\nDataset text downloaded and prepared to \/home\/arne\/.cache\/huggingface\/datasets\/text\/default-15600e4d83254059\/0.0.0\/52cefbb2b82b015d4253f1aeb1e6ee5591124a6491e834acfe1751f765925155. 
Subsequent calls will reuse this data.\r\n{'text': '\u00f0\\x9f\u00a4\\x97\u00f0\\x9f\u00a4\\x97\u00f0\\x9f\u00a4\\x97'}\r\nUsing custom data configuration default\r\nReusing dataset text (\/home\/arne\/.cache\/huggingface\/datasets\/text\/default-15600e4d83254059\/0.0.0\/52cefbb2b82b015d4253f1aeb1e6ee5591124a6491e834acfe1751f765925155)\r\n{'text': '\u00f0\\x9f\u00a4\\x97\u00f0\\x9f\u00a4\\x97\u00f0\\x9f\u00a4\\x97'}\r\n```\r\nJust changing the order (and deleting the temp files):\r\n```\r\ndataset = datasets.load_dataset('text', data_files=['test1.txt'], split=\"train\", encoding=\"utf-8\")\r\nprint(dataset[0])\r\ndataset = datasets.load_dataset('text', data_files=['test1.txt'], split=\"train\", encoding=\"latin_1\")\r\nprint(dataset[0])\r\n```\r\nproduces this:\r\n```\r\nUsing custom data configuration default\r\nDownloading and preparing dataset text\/default-15600e4d83254059 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/arne\/.cache\/huggingface\/datasets\/text\/default-15600e4d83254059\/0.0.0\/52cefbb2b82b015d4253f1aeb1e6ee5591124a6491e834acfe1751f765925155...\r\nDataset text downloaded and prepared to \/home\/arne\/.cache\/huggingface\/datasets\/text\/default-15600e4d83254059\/0.0.0\/52cefbb2b82b015d4253f1aeb1e6ee5591124a6491e834acfe1751f765925155. Subsequent calls will reuse this data.\r\n{'text': '\ud83e\udd17\ud83e\udd17\ud83e\udd17'}\r\nUsing custom data configuration default\r\nReusing dataset text (\/home\/arne\/.cache\/huggingface\/datasets\/text\/default-15600e4d83254059\/0.0.0\/52cefbb2b82b015d4253f1aeb1e6ee5591124a6491e834acfe1751f765925155)\r\n{'text': '\ud83e\udd17\ud83e\udd17\ud83e\udd17'}\r\n```\r\n\r\nIs it intended that the cache path does not depend on the config entries?\r\n\r\ntested with datasets==1.1.2 and python==3.8.5","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/730\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/730\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/729","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/729\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/729\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/729\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/729","id":719558876,"node_id":"MDU6SXNzdWU3MTk1NTg4NzY=","number":729,"title":"Better error message when one forgets to call `add_batch` before 
`compute`","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-12T17:59:22Z","updated_at":"2020-10-29T15:18:24Z","closed_at":"2020-10-29T15:18:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When using metrics, if for some reason a user forgets to call `add_batch` to a metric before `compute` (with no arguments), the error message is a bit cryptic and could probably be made clearer.\r\n\r\n## Reproducer\r\n\r\n```python\r\nimport datasets\r\nimport torch\r\nfrom datasets import Metric\r\n\r\nclass GatherMetric(Metric):\r\n def _info(self):\r\n return datasets.MetricInfo(\r\n description=\"description\",\r\n citation=\"citation\",\r\n inputs_description=\"kwargs\",\r\n features=datasets.Features({\r\n 'predictions': datasets.Value('int64'),\r\n 'references': datasets.Value('int64'),\r\n }),\r\n codebase_urls=[],\r\n reference_urls=[],\r\n format='numpy'\r\n )\r\n\r\n def _compute(self, predictions, references):\r\n return {\"predictions\": predictions, \"labels\": references}\r\n\r\nmetric = GatherMetric(cache_dir=\"test-metric\")\r\ninputs = torch.randint(0, 2, (1024,))\r\ntargets = torch.randint(0, 2, (1024,))\r\n\r\nbatch_size = 8\r\nfor i in range(0, 1024, batch_size):\r\n pass # User forgets to call `add_batch`\r\nresult = metric.compute()\r\n```\r\n\r\n## Stack trace:\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n 3 pass\r\n 4 # metric.add_batch(predictions=inputs[i:i+batch_size], references=targets[i:i+batch_size])\r\n----> 5 result = metric.compute()\r\n\r\n~\/git\/datasets\/src\/datasets\/metric.py in compute(self, *args, **kwargs)\r\n 380 if predictions is not None:\r\n 381 self.add_batch(predictions=predictions, references=references)\r\n--> 382 self._finalize()\r\n 383 \r\n 384 self.cache_file_name = None\r\n\r\n~\/git\/datasets\/src\/datasets\/metric.py in _finalize(self)\r\n 343 elif self.process_id == 0:\r\n 344 # Let's acquire a lock on each node files to be sure they are finished writing\r\n--> 345 file_paths, filelocks = self._get_all_cache_files()\r\n 346 \r\n 347 # Read the predictions and references\r\n\r\n~\/git\/datasets\/src\/datasets\/metric.py in _get_all_cache_files(self)\r\n 280 filelocks = []\r\n 281 for process_id, file_path in enumerate(file_paths):\r\n--> 282 filelock = FileLock(file_path + \".lock\")\r\n 283 try:\r\n 284 filelock.acquire(timeout=self.timeout)\r\n\r\nTypeError: 
unsupported operand type(s) for +: 'NoneType' and 'str'\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/729\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/729\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/728","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/728\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/728\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/728\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/728","id":719555780,"node_id":"MDU6SXNzdWU3MTk1NTU3ODA=","number":728,"title":"Passing `cache_dir` to a metric does not work","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-12T17:55:14Z","updated_at":"2020-10-29T09:34:42Z","closed_at":"2020-10-29T09:34:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When passing `cache_dir` to a custom metric, the folder is concatenated to itself at some point and this results in a FileNotFoundError:\r\n\r\n## Reproducer\r\n\r\n```python\r\nimport datasets\r\nimport torch\r\nfrom datasets import Metric\r\n\r\nclass GatherMetric(Metric):\r\n def _info(self):\r\n return datasets.MetricInfo(\r\n description=\"description\",\r\n citation=\"citation\",\r\n inputs_description=\"kwargs\",\r\n features=datasets.Features({\r\n 'predictions': datasets.Value('int64'),\r\n 'references': datasets.Value('int64'),\r\n }),\r\n codebase_urls=[],\r\n reference_urls=[],\r\n format='numpy'\r\n )\r\n\r\n def _compute(self, predictions, references):\r\n return {\"predictions\": predictions, \"labels\": references}\r\n\r\nmetric = GatherMetric(cache_dir=\"test-metric\")\r\ninputs = torch.randint(0, 2, (1024,))\r\ntargets = torch.randint(0, 2, (1024,))\r\n\r\nbatch_size = 8\r\nfor i in range(0, 1024, batch_size):\r\n metric.add_batch(predictions=inputs[i:i+batch_size], references=targets[i:i+batch_size])\r\nresult = metric.compute()\r\n```\r\n\r\n## Stack trace:\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nFileNotFoundError Traceback (most recent call 
last)\r\n~\/git\/datasets\/src\/datasets\/metric.py in _finalize(self)\r\n 349 reader = ArrowReader(path=self.data_dir, info=DatasetInfo(features=self.features))\r\n--> 350 self.data = Dataset(**reader.read_files([{\"filename\": f} for f in file_paths]))\r\n 351 except FileNotFoundError:\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_reader.py in read_files(self, files, original_instructions)\r\n 227 # Prepend path to filename\r\n--> 228 pa_table = self._read_files(files)\r\n 229 files = copy.deepcopy(files)\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_reader.py in _read_files(self, files)\r\n 166 for f_dict in files:\r\n--> 167 pa_table: pa.Table = self._get_dataset_from_filename(f_dict)\r\n 168 pa_tables.append(pa_table)\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_reader.py in _get_dataset_from_filename(self, filename_skip_take)\r\n 291 )\r\n--> 292 mmap = pa.memory_map(filename)\r\n 293 f = pa.ipc.open_stream(mmap)\r\n\r\n~\/.pyenv\/versions\/3.7.9\/envs\/base\/lib\/python3.7\/site-packages\/pyarrow\/io.pxi in pyarrow.lib.memory_map()\r\n\r\n~\/.pyenv\/versions\/3.7.9\/envs\/base\/lib\/python3.7\/site-packages\/pyarrow\/io.pxi in pyarrow.lib.MemoryMappedFile._open()\r\n\r\n~\/.pyenv\/versions\/3.7.9\/envs\/base\/lib\/python3.7\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/.pyenv\/versions\/3.7.9\/envs\/base\/lib\/python3.7\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nFileNotFoundError: [Errno 2] Failed to open local file 'test-metric\/gather_metric\/default\/test-metric\/gather_metric\/default\/default_experiment-1-0.arrow'. Detail: [errno 2] No such file or directory\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nValueError Traceback (most recent call last)\r\n in \r\n 2 for i in range(0, 1024, batch_size):\r\n 3 metric.add_batch(predictions=inputs[i:i+batch_size], references=targets[i:i+batch_size])\r\n----> 4 result = metric.compute()\r\n\r\n~\/git\/datasets\/src\/datasets\/metric.py in compute(self, *args, **kwargs)\r\n 380 if predictions is not None:\r\n 381 self.add_batch(predictions=predictions, references=references)\r\n--> 382 self._finalize()\r\n 383 \r\n 384 self.cache_file_name = None\r\n\r\n~\/git\/datasets\/src\/datasets\/metric.py in _finalize(self)\r\n 351 except FileNotFoundError:\r\n 352 raise ValueError(\r\n--> 353 \"Error in finalize: another metric instance is already using the local cache file. \"\r\n 354 \"Please specify an experiment_id to avoid colision between distributed metric instances.\"\r\n 355 )\r\n\r\nValueError: Error in finalize: another metric instance is already using the local cache file. 
Please specify an experiment_id to avoid colision between distributed metric instances.\r\n```\r\n\r\nThe code works when we remove the `cache_dir=...` from the metric.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/728\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/728\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/727","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/727\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/727\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/727\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/727","id":719386366,"node_id":"MDU6SXNzdWU3MTkzODYzNjY=","number":727,"title":"Parallel downloads progress bar flickers","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-12T13:36:05Z","updated_at":"2020-10-12T13:36:05Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When there are parallel downloads using the download manager, the tqdm progress bar flickers since all the progress bars are on the same line.\r\n\r\nTo fix that we could simply specify `position=i` for i=0 to n the number of files to download when instantiating the tqdm progress bar. 
\r\n\r\nAnother way would be to have one \"master\" progress bar that tracks the number of finished downloads, and then one progress bar per process that show the current downloads.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/727\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/727\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/726","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/726\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/726\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/726\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/726","id":719313754,"node_id":"MDU6SXNzdWU3MTkzMTM3NTQ=","number":726,"title":"\"Checksums didn't match for dataset source files\" error while loading openwebtext dataset","user":{"login":"SparkJiao","id":16469472,"node_id":"MDQ6VXNlcjE2NDY5NDcy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16469472?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SparkJiao","html_url":"https:\/\/github.com\/SparkJiao","followers_url":"https:\/\/api.github.com\/users\/SparkJiao\/followers","following_url":"https:\/\/api.github.com\/users\/SparkJiao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SparkJiao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SparkJiao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SparkJiao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SparkJiao\/orgs","repos_url":"https:\/\/api.github.com\/users\/SparkJiao\/repos","events_url":"https:\/\/api.github.com\/users\/SparkJiao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SparkJiao\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-10-12T11:45:10Z","updated_at":"2021-10-10T01:52:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI have encountered this problem during loading the openwebtext dataset:\r\n```\r\n>>> dataset = load_dataset('openwebtext')\r\nDownloading and preparing dataset openwebtext\/plain_text (download: 12.00 GiB, generated: 37.04 GiB, post-processed: Unknown size, total: 49.03 GiB) to \/home\/admin\/.cache\/huggingface\/datasets\/openwebtext\/plain_text\/1.0.0\/5c636399c7155da97c982d0d70ecdce30fbca66a4eb4fc768ad91f8331edac02...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/admin\/workspace\/anaconda3\/envs\/torch1.6-py3.7\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/home\/admin\/workspace\/anaconda3\/envs\/torch1.6-py3.7\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 476, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/admin\/workspace\/anaconda3\/envs\/torch1.6-py3.7\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 536, in _download_and_prepare\r\n 
self.info.download_checksums, dl_manager.get_recorded_sizes_checksums(), \"dataset source files\"\r\n File \"\/home\/admin\/workspace\/anaconda3\/envs\/torch1.6-py3.7\/lib\/python3.7\/site-packages\/datasets\/utils\/info_utils.py\", line 39, in verify_checksums\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/zenodo.org\/record\/3834942\/files\/openwebtext.tar.xz']\r\n```\r\nI think this problem is caused because the released dataset has changed. Or I should download the dataset manually?\r\n\r\nSorry for release the unfinised issue by mistake.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/726\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/726\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/725","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/725\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/725\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/725\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/725","id":718985641,"node_id":"MDExOlB1bGxSZXF1ZXN0NTAxMjUxODI1","number":725,"title":"pretty print dataset objects","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-10-12T02:03:46Z","updated_at":"2020-10-23T16:24:35Z","closed_at":"2020-10-23T09:00:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/725","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/725","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/725.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/725.patch","merged_at":"2020-10-23T09:00:46Z"},"body":"Currently, if I do:\r\n```\r\nfrom datasets import load_dataset\r\nload_dataset(\"wikihow\", 'all', data_dir=\"\/hf\/pegasus-datasets\/wikihow\/\")\r\n```\r\nI get:\r\n```\r\n\r\nDatasetDict({'train': Dataset(features: {'text': Value(dtype='string', id=None),\r\n'headline': Value(dtype='string', id=None), 'title': 
Value(dtype='string',\r\nid=None)}, num_rows: 157252), 'validation': Dataset(features: {'text':\r\nValue(dtype='string', id=None), 'headline': Value(dtype='string', id=None),\r\n'title': Value(dtype='string', id=None)}, num_rows: 5599), 'test':\r\nDataset(features: {'text': Value(dtype='string', id=None), 'headline':\r\nValue(dtype='string', id=None), 'title': Value(dtype='string', id=None)},\r\nnum_rows: 5577)})\r\n```\r\n\r\nThis is not very readable. \r\n\r\nCan we either have a better `__repr__` or have a custom method to nicely pprint the dataset object? \r\n\r\nHere is my very simple attempt. With this PR, it produces:\r\n```\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['text', 'headline', 'title'],\r\n num_rows: 157252\r\n })\r\n validation: Dataset({\r\n features: ['text', 'headline', 'title'],\r\n num_rows: 5599\r\n })\r\n test: Dataset({\r\n features: ['text', 'headline', 'title'],\r\n num_rows: 5577\r\n })\r\n})\r\n```\r\nI did omit the data types on purpose to make it more readable, but it shouldn't be too difficult to integrate those too.\r\n\r\nnote that this PR also fixes the inconsistency in output that in master misses enclosing `{}` for Dataset, but it is there for `DatasetDict` - or perhaps it was by design.\r\n\r\nI'm totally not attached to this format, just wanting something more readable. One approach could be to serialize to `json.dumps` or something similar. It'd make the indentation simpler.\r\n\r\nThank you.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/725\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/725\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/724","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/724\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/724\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/724\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/724","id":718947700,"node_id":"MDU6SXNzdWU3MTg5NDc3MDA=","number":724,"title":"need to redirect \/nlp to \/datasets and remove outdated 
info","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-10-11T23:12:12Z","updated_at":"2020-10-14T17:00:12Z","closed_at":"2020-10-14T17:00:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It looks like the website still has all the `nlp` data, e.g.: https:\/\/huggingface.co\/nlp\/viewer\/?dataset=wikihow&config=all\r\n\r\nshould probably redirect to: https:\/\/huggingface.co\/datasets\/wikihow\r\n\r\nalso for some reason the new information is slightly borked. If you look at the old one it was nicely formatted and had the links marked up, the new one is just a jumble of text in one chunk and no markup for links (i.e. not clickable).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/724\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/724\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/723","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/723\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/723\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/723\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/723","id":718926723,"node_id":"MDU6SXNzdWU3MTg5MjY3MjM=","number":723,"title":"Adding pseudo-labels to 
datasets","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"assignees":[{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":8,"created_at":"2020-10-11T21:05:45Z","updated_at":"2021-08-03T05:11:51Z","closed_at":"2021-08-03T05:11:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I recently [uploaded pseudo-labels](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/seq2seq\/precomputed_pseudo_labels.md) for CNN\/DM, XSUM and WMT16-en-ro to s3, and thom mentioned I should add them to this repo.\r\nSince pseudo-labels are just a large model's 
generations on an existing dataset, what is the right way to structure this contribution.\r\nI read https:\/\/huggingface.co\/docs\/datasets\/add_dataset.html, but it doesn't really cover this type of contribution.\r\n\r\nI could, for example, make a new directory, `xsum_bart_pseudolabels` for each set of pseudolabels or add some sort of parametrization to `xsum.py`: https:\/\/github.com\/huggingface\/datasets\/blob\/5f4c6e830f603830117877b8990a0e65a2386aa6\/datasets\/xsum\/xsum.py\r\n\r\nWhat do you think @lhoestq ?\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/723\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/723\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/722","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/722\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/722\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/722\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/722","id":718689117,"node_id":"MDExOlB1bGxSZXF1ZXN0NTAxMDI3NjAw","number":722,"title":"datasets(RWTH-PHOENIX-Weather 2014 T): add initial loading script","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-10-10T19:44:08Z","updated_at":"2021-01-05T07:06:51Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/722","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/722","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/722.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/722.patch","merged_at":null},"body":"This is the first sign language dataset in this repo as far as I know.\r\nFollowing an old issue I opened https:\/\/github.com\/huggingface\/datasets\/issues\/302.\r\n\r\nI added the dataset official REAMDE file, but I see it's not very standard, so it can be 
removed.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/722\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/722\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/721","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/721\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/721\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/721\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/721","id":718647147,"node_id":"MDU6SXNzdWU3MTg2NDcxNDc=","number":721,"title":"feat(dl_manager): add support for ftp downloads","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-10-10T15:50:20Z","updated_at":"2020-10-24T09:24:33Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am working on a new dataset (#302) and encounter a problem downloading it.\r\n\r\n```python\r\n# This is the official download link from https:\/\/www-i6.informatik.rwth-aachen.de\/~koller\/RWTH-PHOENIX-2014-T\/\r\n_URL = \"ftp:\/\/wasserstoff.informatik.rwth-aachen.de\/pub\/rwth-phoenix\/2016\/phoenix-2014-T.v3.tar.gz\"\r\n\r\ndl_manager.download_and_extract(_URL)\r\n```\r\n\r\nI get an error:\r\n\r\n> ValueError: unable to parse ftp:\/\/wasserstoff.informatik.rwth-aachen.de\/pub\/rwth-phoenix\/2016\/phoenix-2014-T.v3.tar.gz as a URL or as a local path\r\n\r\nI checked, and indeed you don't consider `ftp` as a remote file.\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/4c2af707a6955cf4b45f83ac67990395327c5725\/src\/datasets\/utils\/file_utils.py#L188\r\n\r\nAdding `ftp` to that list does not immediately solve the issue, so there probably needs to be some extra work.\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/721\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/721\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/720","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/720\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/720\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/720\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/720","id":716581266,"node_id":"MDU6SXNzdWU3MTY1ODEyNjY=","number":720,"title":"OSError: Cannot find data file when not using the dummy dataset in RAG","user":{"login":"josemlopez","id":4112135,"node_id":"MDQ6VXNlcjQxMTIxMzU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4112135?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/josemlopez","html_url":"https:\/\/github.com\/josemlopez","followers_url":"https:\/\/api.github.com\/users\/josemlopez\/followers","following_url":"https:\/\/api.github.com\/users\/josemlopez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/josemlopez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/josemlopez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/josemlopez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/josemlopez\/orgs","repos_url":"https:\/\/api.github.com\/users\/josemlopez\/repos","events_url":"https:\/\/api.github.com\/users\/josemlopez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/josemlopez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.gi
thub.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2020-10-07T14:27:13Z","updated_at":"2020-12-23T14:04:31Z","closed_at":"2020-12-23T14:04:31Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Environment info\r\n\r\n transformers version: 3.3.1\r\n Platform: Linux-4.19\r\n Python version: 3.7.7\r\n PyTorch version (GPU?): 1.6.0\r\n Tensorflow version (GPU?): No\r\n Using GPU in script?: Yes\r\n Using distributed or parallel set-up in script?: No\r\n\r\n## To reproduce\r\n\r\nSteps to reproduce the behaviour:\r\n```\r\nimport os\r\nos.environ['HF_DATASETS_CACHE'] = '\/workspace\/notebooks\/POCs\/cache'\r\n\r\nfrom transformers import RagTokenizer, RagRetriever, RagTokenForGeneration\r\n\r\ntokenizer = RagTokenizer.from_pretrained(\"facebook\/rag-token-nq\")\r\nretriever = RagRetriever.from_pretrained(\"facebook\/rag-token-nq\", index_name=\"exact\", use_dummy_dataset=False) \r\n```\r\n\r\nPlese note that I'm using the whole dataset: **use_dummy_dataset=False**\r\nAfter around 4 hours (downloading and some other things) this is returned:\r\n\r\n```\r\nDownloading and preparing dataset wiki_dpr\/psgs_w100.nq.exact (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/workspace\/notebooks\/POCs\/cache\/wiki_dpr\/psgs_w100.nq.exact\/0.0.0\/14b973bf2a456087ff69c0fd34526684eed22e48e0dfce4338f9a22b965ce7c2...\r\n\r\n---------------------------------------------------------------------------\r\nUnpicklingError Traceback (most recent call last)\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/numpy\/lib\/npyio.py in load(file, mmap_mode, allow_pickle, fix_imports, encoding)\r\n 459 try:\r\n--> 460 return pickle.load(fid, **pickle_kwargs)\r\n 461 except Exception:\r\n\r\nUnpicklingError: pickle data was truncated\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nOSError Traceback (most recent call last)\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 552 # Prepare split will record examples associated to the split\r\n--> 553 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n 554 except OSError:\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/builder.py in _prepare_split(self, split_generator)\r\n 840 for key, record in utils.tqdm(\r\n--> 841 generator, unit=\" examples\", total=split_info.num_examples, leave=False, disable=not_verbose\r\n 842 ):\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/tqdm\/notebook.py in __iter__(self, *args, **kwargs)\r\n 217 try:\r\n--> 218 for obj in super(tqdm_notebook, self).__iter__(*args, **kwargs):\r\n 219 # return super(tqdm...) 
will not catch exception\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/tqdm\/std.py in __iter__(self)\r\n 1128 try:\r\n-> 1129 for obj in iterable:\r\n 1130 yield obj\r\n\r\n~\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wiki_dpr\/14b973bf2a456087ff69c0fd34526684eed22e48e0dfce4338f9a22b965ce7c2\/wiki_dpr.py in _generate_examples(self, data_file, vectors_files)\r\n 131 break\r\n--> 132 vecs = np.load(open(vectors_files.pop(0), \"rb\"), allow_pickle=True)\r\n 133 vec_idx = 0\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/numpy\/lib\/npyio.py in load(file, mmap_mode, allow_pickle, fix_imports, encoding)\r\n 462 raise IOError(\r\n--> 463 \"Failed to interpret file %s as a pickle\" % repr(file))\r\n 464 finally:\r\n\r\nOSError: Failed to interpret file <_io.BufferedReader name='\/workspace\/notebooks\/POCs\/cache\/downloads\/f34d5f091294259b4ca90e813631e69a6ded660d71b6cbedf89ddba50df94448'> as a pickle\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nOSError Traceback (most recent call last)\r\n in \r\n 1 # ln -s \/workspace\/notebooks\/POCs\/cache \/root\/.cache\/huggingface\/datasets\r\n----> 2 retriever = RagRetriever.from_pretrained(\"facebook\/rag-token-nq\", index_name=\"exact\", use_dummy_dataset=False)\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/transformers\/retrieval_rag.py in from_pretrained(cls, retriever_name_or_path, **kwargs)\r\n 307 generator_tokenizer = rag_tokenizer.generator\r\n 308 return cls(\r\n--> 309 config, question_encoder_tokenizer=question_encoder_tokenizer, generator_tokenizer=generator_tokenizer\r\n 310 )\r\n 311 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/transformers\/retrieval_rag.py in __init__(self, config, question_encoder_tokenizer, generator_tokenizer)\r\n 298 self.config = config\r\n 299 if self._init_retrieval:\r\n--> 300 self.init_retrieval()\r\n 301 \r\n 302 @classmethod\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/transformers\/retrieval_rag.py in init_retrieval(self)\r\n 324 \r\n 325 logger.info(\"initializing retrieval\")\r\n--> 326 self.index.init_index()\r\n 327 \r\n 328 def postprocess_docs(self, docs, input_strings, prefix, n_docs, return_tensors=None):\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/transformers\/retrieval_rag.py in init_index(self)\r\n 238 split=self.dataset_split,\r\n 239 index_name=self.index_name,\r\n--> 240 dummy=self.use_dummy_dataset,\r\n 241 )\r\n 242 self.dataset.set_format(\"numpy\", columns=[\"embeddings\"], output_all_columns=True)\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, save_infos, script_version, **config_kwargs)\r\n 609 download_config=download_config,\r\n 610 download_mode=download_mode,\r\n--> 611 ignore_verifications=ignore_verifications,\r\n 612 )\r\n 613 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n 474 if not downloaded_from_gcs:\r\n 475 self._download_and_prepare(\r\n--> 476 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 477 )\r\n 478 # Sync info\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 553 self._prepare_split(split_generator, 
**prepare_split_kwargs)\r\n 554 except OSError:\r\n--> 555 raise OSError(\"Cannot find data file. \" + (self.manual_download_instructions or \"\"))\r\n 556 \r\n 557 if verify_infos:\r\n\r\nOSError: Cannot find data file. \r\n\r\n```\r\n\r\nThanks \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/720\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/720\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/719","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/719\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/719\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/719\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/719","id":716492263,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk5MjE5Mjg2","number":719,"title":"Fix train_test_split output format","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-07T12:39:01Z","updated_at":"2020-10-07T13:38:08Z","closed_at":"2020-10-07T13:38:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/719","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/719","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/719.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/719.patch","merged_at":"2020-10-07T13:38:06Z"},"body":"There was an issue in the `transmit_format` wrapper that returned bad formats when using train_test_split.\r\nThis was due to `column_names` being handled as a List[str] instead of Dict[str, List[str]] when the dataset transform (train_test_split) returns a DatasetDict (one set of column names per split).\r\n\r\nThis should fix @timothyjlaurent 's issue in #620 and fix #676 \r\n\r\nI added tests for `transmit_format` so that it doesn't happen 
again","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/719\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/719\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/718","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/718\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/718\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/718\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/718","id":715694709,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk4NTU5MDcw","number":718,"title":"Don't use tqdm 4.50.0","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-06T13:45:53Z","updated_at":"2020-10-06T13:49:24Z","closed_at":"2020-10-06T13:49:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/718","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/718","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/718.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/718.patch","merged_at":"2020-10-06T13:49:22Z"},"body":"tqdm 4.50.0 introduced permission errors on windows\r\nsee [here](https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/235\/workflows\/cfb6a39f-68eb-4802-8b17-2cd5e8ea7369\/jobs\/1111) for the error details.\r\n\r\nFor now I just added `<4.50.0` in the setup.py\r\nHopefully we can find what's wrong with this version soon","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/718\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/718\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/717","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/717\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/717\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/717\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/717","id":714959268,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk3OTUwOTA2","number":717,"title":"Fixes #712 Error in the Overview.ipynb notebook","user":{"login":"subhrm","id":850012,"node_id":"MDQ6VXNlcjg1MDAxMg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/850012?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/subhrm","html_url":"https:\/\/github.com\/subhrm","followers_url":"https:\/\/api.github.com\/users\/subhrm\/followers","following_url":"https:\/\/api.github.com\/users\/subhrm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/subhrm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/subhrm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/subhrm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/subhrm\/orgs","repos_url":"https:\/\/api.github.com\/users\/subhrm\/repos","events_url":"https:\/\/api.github.com\/users\/subhrm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/subhrm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-05T15:50:41Z","updated_at":"2020-10-06T06:31:43Z","closed_at":"2020-10-05T16:25:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/717","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/717","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/717.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/717.patch","merged_at":"2020-10-05T16:25:40Z"},"body":"Fixes #712 Error in the Overview.ipynb notebook by adding `with_details=True` parameter to `list_datasets` function in Cell 3 of **overview** notebook","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/717\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/717\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/716","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/716\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/716\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/716\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/716","id":714952888,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk3OTQ1ODAw","number":716,"title":"Fixes #712 Attribute error in cell 3 of the overview 
notebook","user":{"login":"subhrm","id":850012,"node_id":"MDQ6VXNlcjg1MDAxMg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/850012?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/subhrm","html_url":"https:\/\/github.com\/subhrm","followers_url":"https:\/\/api.github.com\/users\/subhrm\/followers","following_url":"https:\/\/api.github.com\/users\/subhrm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/subhrm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/subhrm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/subhrm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/subhrm\/orgs","repos_url":"https:\/\/api.github.com\/users\/subhrm\/repos","events_url":"https:\/\/api.github.com\/users\/subhrm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/subhrm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-05T15:42:09Z","updated_at":"2020-10-05T15:46:38Z","closed_at":"2020-10-05T15:46:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/716","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/716","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/716.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/716.patch","merged_at":null},"body":"Fixes the Attribute error in cell 3 of the overview notebook","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/716\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/716\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/715","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/715\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/715\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/715\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/715","id":714690192,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk3NzMwMDQ2","number":715,"title":"Use python read for text 
dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-10-05T09:47:55Z","updated_at":"2020-10-05T13:13:18Z","closed_at":"2020-10-05T13:13:17Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/715","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/715","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/715.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/715.patch","merged_at":"2020-10-05T13:13:16Z"},"body":"As mentioned in #622 the pandas reader used for text dataset doesn't work properly when there are \\r characters in the text file.\r\n\r\nInstead I switched to pure python using `open` and `read`.\r\nFrom my benchmark on a 100MB text file, it's the same speed as the previous pandas reader.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/715\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":3,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/715\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/714","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/714\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/714\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/714\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/714","id":714487881,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk3NTYzNjAx","number":714,"title":"Add the official dependabot 
implementation","user":{"login":"ALazyMeme","id":12804673,"node_id":"MDQ6VXNlcjEyODA0Njcz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12804673?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ALazyMeme","html_url":"https:\/\/github.com\/ALazyMeme","followers_url":"https:\/\/api.github.com\/users\/ALazyMeme\/followers","following_url":"https:\/\/api.github.com\/users\/ALazyMeme\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ALazyMeme\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ALazyMeme\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ALazyMeme\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ALazyMeme\/orgs","repos_url":"https:\/\/api.github.com\/users\/ALazyMeme\/repos","events_url":"https:\/\/api.github.com\/users\/ALazyMeme\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ALazyMeme\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-05T03:49:45Z","updated_at":"2020-10-12T11:49:21Z","closed_at":"2020-10-12T11:49:21Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/714","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/714","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/714.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/714.patch","merged_at":null},"body":"This will keep dependencies up to date. This will require a pr label `dependencies` being created in order to function correctly.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/714\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/714\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/713","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/713\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/713\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/713\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/713","id":714475732,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk3NTUzOTUy","number":713,"title":"Fix reading text files with carriage return 
symbols","user":{"login":"mozharovsky","id":6762769,"node_id":"MDQ6VXNlcjY3NjI3Njk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6762769?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mozharovsky","html_url":"https:\/\/github.com\/mozharovsky","followers_url":"https:\/\/api.github.com\/users\/mozharovsky\/followers","following_url":"https:\/\/api.github.com\/users\/mozharovsky\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mozharovsky\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mozharovsky\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mozharovsky\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mozharovsky\/orgs","repos_url":"https:\/\/api.github.com\/users\/mozharovsky\/repos","events_url":"https:\/\/api.github.com\/users\/mozharovsky\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mozharovsky\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-05T03:07:03Z","updated_at":"2020-10-09T05:58:25Z","closed_at":"2020-10-05T13:49:29Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/713","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/713","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/713.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/713.patch","merged_at":null},"body":"The new pandas-based text reader isn't able to work properly with files that contain carriage return symbols (`\\r`). \r\n\r\nIt fails with the following error message:\r\n\r\n```\r\n...\r\n File \"pandas\/_libs\/parsers.pyx\", line 847, in pandas._libs.parsers.TextReader.read\r\n File \"pandas\/_libs\/parsers.pyx\", line 874, in pandas._libs.parsers.TextReader._read_low_memory\r\n File \"pandas\/_libs\/parsers.pyx\", line 918, in pandas._libs.parsers.TextReader._read_rows\r\n File \"pandas\/_libs\/parsers.pyx\", line 905, in pandas._libs.parsers.TextReader._tokenize_rows\r\n File \"pandas\/_libs\/parsers.pyx\", line 2042, in pandas._libs.parsers.raise_parser_error\r\npandas.errors.ParserError: Error tokenizing data. C error: Buffer overflow caught - possible malformed input file.\r\n```\r\n\r\n___\r\nI figured out the pandas uses those symbols as line terminators and this eventually causes the error. Explicitly specifying the `lineterminator` fixes that issue and everything works fine. 
\r\n\r\nPlease, consider this PR as it seems to be a common issue to solve.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/713\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/713\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/712","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/712\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/712\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/712\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/712","id":714242316,"node_id":"MDU6SXNzdWU3MTQyNDIzMTY=","number":712,"title":"Error in the notebooks\/Overview.ipynb notebook","user":{"login":"subhrm","id":850012,"node_id":"MDQ6VXNlcjg1MDAxMg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/850012?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/subhrm","html_url":"https:\/\/github.com\/subhrm","followers_url":"https:\/\/api.github.com\/users\/subhrm\/followers","following_url":"https:\/\/api.github.com\/users\/subhrm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/subhrm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/subhrm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/subhrm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/subhrm\/orgs","repos_url":"https:\/\/api.github.com\/users\/subhrm\/repos","events_url":"https:\/\/api.github.com\/users\/subhrm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/subhrm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-10-04T05:58:31Z","updated_at":"2020-10-05T16:25:40Z","closed_at":"2020-10-05T16:25:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI got the following error in **cell number 3** while exploring the **Overview.ipynb** notebook in google colab. I used the [link ](https:\/\/colab.research.google.com\/github\/huggingface\/datasets\/blob\/master\/notebooks\/Overview.ipynb) provided in the main README file to open it in colab. 
\r\n\r\n```python\r\n# You can access various attributes of the datasets before downloading them\r\nsquad_dataset = list_datasets()[datasets.index('squad')]\r\n\r\npprint(squad_dataset.__dict__) # It's a simple python dataclass\r\n```\r\n\r\nError message\r\n```\r\n---------------------------------------------------------------------------\r\nAttributeError Traceback (most recent call last)\r\n in ()\r\n 2 squad_dataset = list_datasets()[datasets.index('squad')]\r\n 3 \r\n ----> 4 pprint(squad_dataset.__dict__) # It's a simple python dataclass\r\n \r\nAttributeError: 'str' object has no attribute '__dict__'\r\n```\r\n\r\nThe object `squad_dataset` is a `str` not a `dataclass` .","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/712\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/712\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/711","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/711\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/711\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/711\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/711","id":714236408,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk3Mzc3NzU3","number":711,"title":"New Update bertscore.py","user":{"login":"PassionateLooker","id":51692618,"node_id":"MDQ6VXNlcjUxNjkyNjE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/51692618?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PassionateLooker","html_url":"https:\/\/github.com\/PassionateLooker","followers_url":"https:\/\/api.github.com\/users\/PassionateLooker\/followers","following_url":"https:\/\/api.github.com\/users\/PassionateLooker\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PassionateLooker\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PassionateLooker\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PassionateLooker\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PassionateLooker\/orgs","repos_url":"https:\/\/api.github.com\/users\/PassionateLooker\/repos","events_url":"https:\/\/api.github.com\/users\/PassionateLooker\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PassionateLooker\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-04T05:13:09Z","updated_at":"2020-10-05T16:26:51Z","closed_at":"2020-10-05T16:26:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/711","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/711","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/711.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/711.patch","merged_at":"2020-10-05T16:26:51Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/711\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/
api.github.com\/repos\/huggingface\/datasets\/issues\/711\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/710","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/710\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/710\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/710\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/710","id":714186999,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk3MzQ1NjQ0","number":710,"title":"fix README typos\/ consistency","user":{"login":"discdiver","id":7703961,"node_id":"MDQ6VXNlcjc3MDM5NjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7703961?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/discdiver","html_url":"https:\/\/github.com\/discdiver","followers_url":"https:\/\/api.github.com\/users\/discdiver\/followers","following_url":"https:\/\/api.github.com\/users\/discdiver\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/discdiver\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/discdiver\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/discdiver\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/discdiver\/orgs","repos_url":"https:\/\/api.github.com\/users\/discdiver\/repos","events_url":"https:\/\/api.github.com\/users\/discdiver\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/discdiver\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-03T22:20:56Z","updated_at":"2020-10-17T09:52:45Z","closed_at":"2020-10-17T09:52:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/710","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/710","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/710.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/710.patch","merged_at":"2020-10-17T09:52:45Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/710\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/710\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/709","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/709\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/709\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/709\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/709","id":714067902,"node_id":"MDU6SXNzdWU3MTQwNjc5MDI=","number":709,"title":"How to use similarity settings other then \"BM25\" in Elasticsearch index 
?","user":{"login":"nsankar","id":431890,"node_id":"MDQ6VXNlcjQzMTg5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/431890?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nsankar","html_url":"https:\/\/github.com\/nsankar","followers_url":"https:\/\/api.github.com\/users\/nsankar\/followers","following_url":"https:\/\/api.github.com\/users\/nsankar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nsankar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nsankar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nsankar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nsankar\/orgs","repos_url":"https:\/\/api.github.com\/users\/nsankar\/repos","events_url":"https:\/\/api.github.com\/users\/nsankar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nsankar\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-03T11:18:49Z","updated_at":"2021-07-18T19:02:55Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**QUESTION : How should we use other similarity algorithms supported by Elasticsearch other than \"BM25\" ?**\r\n**ES Reference**\r\nhttps:\/\/www.elastic.co\/guide\/en\/elasticsearch\/reference\/current\/index-modules-similarity.html\r\n**HF doc reference:**\r\nhttps:\/\/huggingface.co\/docs\/datasets\/faiss_and_ea.html\r\n\r\n**context :**\r\n========\r\n\r\nI used the latest Elasticsearch server version 7.9.2\r\nWhen I set DFR which is one of the other similarity algorithms supported by elasticsearch in the mapping, I get an error\r\n\r\nFor example DFR that I had tried in the first instance in mappings as below.,\r\n`\"mappings\": {\"properties\": {\"text\": {\"type\": \"text\", \"analyzer\": \"standard\", \"similarity\": \"DFR\"}}},`\r\n\r\nI get the following error \r\nRequestError: RequestError(400, 'mapper_parsing_exception', 'Unknown Similarity type [DFR] for field [text]')\r\n\r\nThe other thing as another option I had tried was to declare \"similarity\": \"my_similarity\" within settings and then assigning \"my_similarity\" inside the mappings as below \r\n\r\n`es_config = {\r\n \"settings\": {\r\n \"number_of_shards\": 1,\r\n **\"similarity\": \"my_similarity\"**: {\r\n \"type\": \"DFR\",\r\n \"basic_model\": \"g\",\r\n \"after_effect\": \"l\",\r\n \"normalization\": \"h2\",\r\n \"normalization.h2.c\": \"3.0\"\r\n } ,\r\n \"analysis\": {\"analyzer\": {\"stop_standard\": {\"type\": \"standard\", \" stopwords\": \"_english_\"}}},\r\n \r\n },\r\n \"mappings\": {\"properties\": {\"text\": {\"type\": \"text\", \"analyzer\": \"standard\", \"similarity\": \"my_similarity\"}}},\r\n }`\r\n\r\nFor this , I got the following error\r\nRequestError: RequestError(400, 'illegal_argument_exception', 'unknown setting [index.similarity] please check that any required plugins are installed, or check the breaking changes documentation for removed settings')\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/709\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/709\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/708","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/708\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/708\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/708\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/708","id":714020953,"node_id":"MDU6SXNzdWU3MTQwMjA5NTM=","number":708,"title":"Datasets performance slow? - 6.4x slower than in memory dataset","user":{"login":"eugeneware","id":38154,"node_id":"MDQ6VXNlcjM4MTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38154?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eugeneware","html_url":"https:\/\/github.com\/eugeneware","followers_url":"https:\/\/api.github.com\/users\/eugeneware\/followers","following_url":"https:\/\/api.github.com\/users\/eugeneware\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eugeneware\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eugeneware\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eugeneware\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eugeneware\/orgs","repos_url":"https:\/\/api.github.com\/users\/eugeneware\/repos","events_url":"https:\/\/api.github.com\/users\/eugeneware\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eugeneware\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-10-03T06:44:07Z","updated_at":"2021-02-12T14:13:28Z","closed_at":"2021-02-12T14:13:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I've been very excited about this amazing datasets project. However, I've noticed that the performance can be substantially slower than using an in-memory dataset.\r\n\r\nNow, this is expected I guess, due to memory mapping data using arrow files, and you don't get anything for free. But I was surprised at how much slower.\r\n\r\nFor example, in the `yelp_polarity` dataset (560000 datapoints, or 17500 batches of 32), it was taking me 3:31 to just get process the data and get it on the GPU (no model involved). Whereas, the equivalent in-memory dataset would finish in just 0:33.\r\n\r\nIs this expected? Given that one of the goals of this project is also accelerate dataset processing, this seems a bit slower than I would expect. I understand the advantages of being able to work on datasets that exceed memory, and that's very exciting to me, but thought I'd open this issue to discuss.\r\n\r\nFor reference I'm running a AMD Ryzen Threadripper 1900X 8-Core Processor CPU, with 128 GB of RAM and an NVME SSD Samsung 960 EVO. I'm running with an RTX Titan 24GB GPU.\r\n\r\nI can see with `iotop` that the dataset gets quickly loaded into the system read buffers, and thus doesn't incur any additional IO reads. Thus in theory, all the data *should* be in RAM, but in my benchmark code below it's still 6.4 times slower.\r\n\r\nWhat am I doing wrong? And is there a way to force the datasets to completely load into memory instead of being memory mapped in cases where you want maximum performance?\r\n\r\nAt 3:31 for 17500 batches, that's 12ms per batch. 
Does this 12ms just become insignificant as a proportion of forward and backward passes in practice, and thus it's not worth worrying about this in practice?\r\n\r\nIn any case, here's my code `benchmark.py`. If you run it with an argument of `memory` it will copy the data into memory before executing the same test.\r\n\r\n``` py\r\nimport sys\r\nfrom datasets import load_dataset\r\nfrom transformers import DataCollatorWithPadding, BertTokenizerFast\r\nfrom torch.utils.data import DataLoader\r\nfrom tqdm import tqdm\r\n\r\nif __name__ == '__main__':\r\n tokenizer = BertTokenizerFast.from_pretrained('bert-base-cased')\r\n collate_fn = DataCollatorWithPadding(tokenizer, padding=True)\r\n\r\n ds = load_dataset('yelp_polarity')\r\n\r\n def do_tokenize(x):\r\n return tokenizer(x['text'], truncation=True)\r\n\r\n ds = ds.map(do_tokenize, batched=True)\r\n ds.set_format('torch', ['input_ids', 'token_type_ids', 'attention_mask'])\r\n\r\n if len(sys.argv) == 2 and sys.argv[1] == 'memory':\r\n # copy to memory - probably a faster way to do this - but demonstrates the point\r\n # approximately 530 batches per second - 17500 batches in 0:33\r\n print('using memory')\r\n _ds = [data for data in tqdm(ds['train'])]\r\n else:\r\n # approximately 83 batches per second - 17500 batches in 3:31\r\n print('using datasets')\r\n _ds = ds['train']\r\n\r\n dl = DataLoader(_ds, shuffle=True, collate_fn=collate_fn, batch_size=32, num_workers=4)\r\n\r\n for data in tqdm(dl):\r\n for k, v in data.items():\r\n data[k] = v.to('cuda')\r\n```\r\n\r\nFor reference, my conda environment is [here](https:\/\/gist.github.com\/05b6101518ff70ed42a858b302a0405d)\r\n\r\nOnce again, I'm very excited about this library, and how easy it is to load datasets, and to do so without worrying about system memory constraints.\r\n\r\nThanks for all your great work.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/708\/reactions","total_count":4,"+1":4,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/708\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/707","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/707\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/707\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/707\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/707","id":713954666,"node_id":"MDU6SXNzdWU3MTM5NTQ2NjY=","number":707,"title":"Requirements should specify 
pyarrow<1","user":{"login":"mathcass","id":918541,"node_id":"MDQ6VXNlcjkxODU0MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/918541?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mathcass","html_url":"https:\/\/github.com\/mathcass","followers_url":"https:\/\/api.github.com\/users\/mathcass\/followers","following_url":"https:\/\/api.github.com\/users\/mathcass\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mathcass\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mathcass\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mathcass\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mathcass\/orgs","repos_url":"https:\/\/api.github.com\/users\/mathcass\/repos","events_url":"https:\/\/api.github.com\/users\/mathcass\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mathcass\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-10-02T23:39:39Z","updated_at":"2020-12-04T08:22:39Z","closed_at":"2020-10-04T20:50:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I was looking at the docs on [Perplexity](https:\/\/huggingface.co\/transformers\/perplexity.html) via GPT2. When you load datasets and try to load Wikitext, you get the error,\r\n\r\n```\r\nmodule 'pyarrow' has no attribute 'PyExtensionType'\r\n```\r\nI traced it back to datasets having installed PyArrow 1.0.1 but there's not pinning in the setup file. \r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/e86a2a8f869b91654e782c9133d810bb82783200\/setup.py#L68\r\n\r\nDowngrading by installing `pip install \"pyarrow<1\"` resolved the issue.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/707\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/707\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/706","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/706\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/706\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/706\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/706","id":713721959,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2OTkwMDA0","number":706,"title":"Fix config creation for data files with 
NamedSplit","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-02T15:46:49Z","updated_at":"2020-10-05T08:15:00Z","closed_at":"2020-10-05T08:14:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/706","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/706","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/706.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/706.patch","merged_at":"2020-10-05T08:14:59Z"},"body":"During config creation, we need to iterate through the data files of all the splits to compute a hash.\r\nTo make sure the hash is unique given a certain combination of files\/splits, we sort the split names.\r\nHowever the `NamedSplit` objects can't be passed to `sorted` and currently it raises an error: we need to sort the string of their names instead.\r\n\r\nFix #705 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/706\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/706\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/705","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/705\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/705\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/705\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/705","id":713709100,"node_id":"MDU6SXNzdWU3MTM3MDkxMDA=","number":705,"title":"TypeError: '<' not supported between instances of 'NamedSplit' and 
'NamedSplit'","user":{"login":"pvcastro","id":12713359,"node_id":"MDQ6VXNlcjEyNzEzMzU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12713359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pvcastro","html_url":"https:\/\/github.com\/pvcastro","followers_url":"https:\/\/api.github.com\/users\/pvcastro\/followers","following_url":"https:\/\/api.github.com\/users\/pvcastro\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pvcastro\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pvcastro\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pvcastro\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pvcastro\/orgs","repos_url":"https:\/\/api.github.com\/users\/pvcastro\/repos","events_url":"https:\/\/api.github.com\/users\/pvcastro\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pvcastro\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2020-10-02T15:27:55Z","updated_at":"2020-10-05T08:14:59Z","closed_at":"2020-10-05T08:14:59Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Environment info\r\n\r\n \r\n- `transformers` version: 3.3.1 (installed from master)\r\n- `datasets` version: 1.0.2 (installed as a dependency from transformers)\r\n- Platform: Linux-4.15.0-118-generic-x86_64-with-debian-stretch-sid\r\n- Python version: 3.7.9\r\n\r\nI'm testing my own text classification dataset using [this 
example](https:\/\/github.com\/huggingface\/transformers\/tree\/master\/examples\/text-classification#run-generic-text-classification-script-in-tensorflow) from transformers. The dataset is split into train \/ dev \/ test, and in csv format, containing just a text and a label columns, using comma as sep. Here's a sample:\r\n```\r\ntext,label\r\n\"Registra-se a presen\u00e7a do acad\u00eamico . Ao me deparar com a descri\u00e7\u00e3o de dois autores no polo ativo da a\u00e7\u00e3o junto ao PJe , margem esquerda foi informado pela procuradora do reclamante que se trata de uma reclama\u00e7\u00e3o trabalhista individual . Diante disso , face a aus\u00eancia injustificada do autor , determina-se o ARQUIVAMENTO do presente processo , com rela\u00e7\u00e3o a este , nos termos do [[ art . 844 da CLT ]] . CUSTAS AUTOR - DISPENSADO Custas pelo autor no importe de R $326,82 , calculadas sobre R $16.341,03 , dispensadas na forma da lei , em virtude da concess\u00e3o dos benef\u00edcios da Justi\u00e7a Gratuita , ora deferida . Cientes os presentes . Audi\u00eancia encerrada \u00e0s 8h42min . Ju\u00edza do Trabalho Ata redigida por << >> , Secret\u00e1rio de Audi\u00eancia .\",NO_RELATION\r\n```\r\n\r\nHowever, @Santosh-Gupta reported in #7351 that he had the exact same problem using the ChemProt dataset. His colab notebook is referenced in the following section.\r\n\r\n## To reproduce\r\n\r\nSteps to reproduce the behavior:\r\n\r\n1. Created a new conda environment using conda env -n transformers python=3.7\r\n2. Cloned transformers master, `cd` into it and installed using pip install --editable . -r examples\/requirements.txt \r\n3. Installed tensorflow with `pip install tensorflow`\r\n3. Ran `run_tf_text_classification.py` with the following parameters:\r\n\r\n```\r\n--train_file \/train.csv \\\r\n--dev_file \/dev.csv \\ \r\n--test_file \/test.csv \\\r\n--label_column_id 1 \\\r\n--model_name_or_path neuralmind\/bert-base-portuguese-cased \\\r\n--output_dir \\\r\n--num_train_epochs 4 \\\r\n--per_device_train_batch_size 4 \\\r\n--per_device_eval_batch_size 4 \\\r\n--do_train \\\r\n--do_eval \\\r\n--do_predict \\\r\n--logging_steps 1000 \\\r\n--evaluate_during_training \\\r\n--save_steps 1000 \\\r\n--overwrite_output_dir \\\r\n--overwrite_cache\r\n```\r\n\r\nI have also copied [@Santosh-Gupta 's colab notebook](https:\/\/colab.research.google.com\/drive\/11APei6GjphCZbH5wD9yVlfGvpIkh8pwr?usp=sharing) as a reference.\r\n\r\n\r\n\r\nHere is the stack trace:\r\n\r\n```\r\n2020-10-02 07:33:41.622011: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcudart.so.10.1\r\n\/media\/discoD\/repositorios\/transformers_pedro\/src\/transformers\/training_args.py:333: FutureWarning: The `evaluate_during_training` argument is deprecated in favor of `evaluation_strategy` (which has more options)\r\n FutureWarning,\r\n2020-10-02 07:33:43.471648: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcuda.so.1\r\n2020-10-02 07:33:43.471791: I tensorflow\/stream_executor\/cuda\/cuda_gpu_executor.cc:982] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\r\n2020-10-02 07:33:43.472664: I tensorflow\/core\/common_runtime\/gpu\/gpu_device.cc:1716] Found device 0 with properties: \r\npciBusID: 0000:01:00.0 name: GeForce GTX 1070 computeCapability: 6.1\r\ncoreClock: 1.7085GHz coreCount: 15 deviceMemorySize: 7.92GiB deviceMemoryBandwidth: 
238.66GiB\/s\r\n2020-10-02 07:33:43.472684: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcudart.so.10.1\r\n2020-10-02 07:33:43.472765: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcublas.so.10\r\n2020-10-02 07:33:43.472809: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcufft.so.10\r\n2020-10-02 07:33:43.472848: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcurand.so.10\r\n2020-10-02 07:33:43.474209: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcusolver.so.10\r\n2020-10-02 07:33:43.474276: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcusparse.so.10\r\n2020-10-02 07:33:43.561219: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcudnn.so.7\r\n2020-10-02 07:33:43.561397: I tensorflow\/stream_executor\/cuda\/cuda_gpu_executor.cc:982] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\r\n2020-10-02 07:33:43.562345: I tensorflow\/stream_executor\/cuda\/cuda_gpu_executor.cc:982] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\r\n2020-10-02 07:33:43.563219: I tensorflow\/core\/common_runtime\/gpu\/gpu_device.cc:1858] Adding visible gpu devices: 0\r\n2020-10-02 07:33:43.563595: I tensorflow\/core\/platform\/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN)to use the following CPU instructions in performance-critical operations: AVX2 FMA\r\nTo enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\r\n2020-10-02 07:33:43.570091: I tensorflow\/core\/platform\/profile_utils\/cpu_utils.cc:104] CPU Frequency: 3591830000 Hz\r\n2020-10-02 07:33:43.570494: I tensorflow\/compiler\/xla\/service\/service.cc:168] XLA service 0x560842432400 initialized for platform Host (this does not guarantee that XLA will be used). 
Devices:\r\n2020-10-02 07:33:43.570511: I tensorflow\/compiler\/xla\/service\/service.cc:176] StreamExecutor device (0): Host, Default Version\r\n2020-10-02 07:33:43.570702: I tensorflow\/stream_executor\/cuda\/cuda_gpu_executor.cc:982] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\r\n2020-10-02 07:33:43.571599: I tensorflow\/core\/common_runtime\/gpu\/gpu_device.cc:1716] Found device 0 with properties: \r\npciBusID: 0000:01:00.0 name: GeForce GTX 1070 computeCapability: 6.1\r\ncoreClock: 1.7085GHz coreCount: 15 deviceMemorySize: 7.92GiB deviceMemoryBandwidth: 238.66GiB\/s\r\n2020-10-02 07:33:43.571633: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcudart.so.10.1\r\n2020-10-02 07:33:43.571645: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcublas.so.10\r\n2020-10-02 07:33:43.571654: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcufft.so.10\r\n2020-10-02 07:33:43.571664: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcurand.so.10\r\n2020-10-02 07:33:43.571691: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcusolver.so.10\r\n2020-10-02 07:33:43.571704: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcusparse.so.10\r\n2020-10-02 07:33:43.571718: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcudnn.so.7\r\n2020-10-02 07:33:43.571770: I tensorflow\/stream_executor\/cuda\/cuda_gpu_executor.cc:982] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\r\n2020-10-02 07:33:43.572641: I tensorflow\/stream_executor\/cuda\/cuda_gpu_executor.cc:982] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\r\n2020-10-02 07:33:43.573475: I tensorflow\/core\/common_runtime\/gpu\/gpu_device.cc:1858] Adding visible gpu devices: 0\r\n2020-10-02 07:33:47.139227: I tensorflow\/core\/common_runtime\/gpu\/gpu_device.cc:1257] Device interconnect StreamExecutor with strength 1 edge matrix:\r\n2020-10-02 07:33:47.139265: I tensorflow\/core\/common_runtime\/gpu\/gpu_device.cc:1263] 0 \r\n2020-10-02 07:33:47.139272: I tensorflow\/core\/common_runtime\/gpu\/gpu_device.cc:1276] 0: N \r\n2020-10-02 07:33:47.140323: I tensorflow\/stream_executor\/cuda\/cuda_gpu_executor.cc:982] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\r\n2020-10-02 07:33:47.141248: I tensorflow\/stream_executor\/cuda\/cuda_gpu_executor.cc:982] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\r\n2020-10-02 07:33:47.142085: I tensorflow\/stream_executor\/cuda\/cuda_gpu_executor.cc:982] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\r\n2020-10-02 07:33:47.142854: I tensorflow\/core\/common_runtime\/gpu\/gpu_device.cc:1402] Created TensorFlow device (\/job:localhost\/replica:0\/task:0\/device:GPU:0 with 5371 MB memory) -> physical GPU (device: 0, name: 
GeForce GTX 1070, pci bus id: 0000:01:00.0, compute capability: 6.1)\r\n2020-10-02 07:33:47.146317: I tensorflow\/compiler\/xla\/service\/service.cc:168] XLA service 0x5608b95dc5c0 initialized for platform CUDA (this does not guarantee that XLA will be used). Devices:\r\n2020-10-02 07:33:47.146336: I tensorflow\/compiler\/xla\/service\/service.cc:176] StreamExecutor device (0): GeForce GTX 1070, Compute Capability 6.1\r\n10\/02\/2020 07:33:47 - INFO - __main__ - n_replicas: 1, distributed training: False, 16-bits training: False\r\n10\/02\/2020 07:33:47 - INFO - __main__ - Training\/evaluation parameters TFTrainingArguments(output_dir='\/media\/discoD\/models\/datalawyer\/pedidos\/transformers_tf', overwrite_output_dir=True, do_train=True, do_eval=True, do_predict=True, evaluate_during_training=True, evaluation_strategy=, prediction_loss_only=False, per_device_train_batch_size=4, per_device_eval_batch_size=4, per_gpu_train_batch_size=None, per_gpu_eval_batch_size=None, gradient_accumulation_steps=1, learning_rate=5e-05, weight_decay=0.0, adam_beta1=0.9, adam_beta2=0.999, adam_epsilon=1e-08, max_grad_norm=1.0, num_train_epochs=4.0, max_steps=-1, warmup_steps=0, logging_dir='runs\/Oct02_07-33-43_user-XPS-8700', logging_first_step=False, logging_steps=1000, save_steps=1000, save_total_limit=None, no_cuda=False, seed=42, fp16=False, fp16_opt_level='O1', local_rank=-1, tpu_num_cores=None, tpu_metrics_debug=False, debug=False, dataloader_drop_last=False, eval_steps=1000, dataloader_num_workers=0, past_index=-1, run_name='\/media\/discoD\/models\/datalawyer\/pedidos\/transformers_tf', disable_tqdm=False, remove_unused_columns=True, label_names=None, load_best_model_at_end=False, metric_for_best_model=None, greater_is_better=False, tpu_name=None, xla=False)\r\n10\/02\/2020 07:33:53 - INFO - filelock - Lock 140407857405776 acquired on \/home\/user\/.cache\/huggingface\/datasets\/e0f1e9ed46db1e2429189f06b479cbd4075c0976104c1aacf8f77d9a53d2ad87.03756fef6da334f50a7ff73608e21b5018229944ca250416ce7352e25d84a552.py.lock\r\n10\/02\/2020 07:33:53 - INFO - filelock - Lock 140407857405776 released on \/home\/user\/.cache\/huggingface\/datasets\/e0f1e9ed46db1e2429189f06b479cbd4075c0976104c1aacf8f77d9a53d2ad87.03756fef6da334f50a7ff73608e21b5018229944ca250416ce7352e25d84a552.py.lock\r\nUsing custom data configuration default\r\nTraceback (most recent call last):\r\n File \"run_tf_text_classification.py\", line 283, in \r\n main()\r\n File \"run_tf_text_classification.py\", line 222, in main\r\n max_seq_length=data_args.max_seq_length,\r\n File \"run_tf_text_classification.py\", line 43, in get_tfds\r\n ds = datasets.load_dataset(\"csv\", data_files=files)\r\n File \"\/media\/discoD\/anaconda3\/envs\/transformers\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 604, in load_dataset\r\n **config_kwargs,\r\n File \"\/media\/discoD\/anaconda3\/envs\/transformers\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 158, in __init__\r\n **config_kwargs,\r\n File \"\/media\/discoD\/anaconda3\/envs\/transformers\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 269, in _create_builder_config\r\n for key in sorted(data_files.keys()):\r\nTypeError: '<' not supported between instances of 'NamedSplit' and 'NamedSplit'\r\n```\r\n\r\n## Expected behavior\r\n\r\nShould be able to run the text-classification example as described in 
[https:\/\/github.com\/huggingface\/transformers\/tree\/master\/examples\/text-classification#run-generic-text-classification-script-in-tensorflow](https:\/\/github.com\/huggingface\/transformers\/tree\/master\/examples\/text-classification#run-generic-text-classification-script-in-tensorflow)\r\n\r\nOriginally opened this issue at transformers' repository: [https:\/\/github.com\/huggingface\/transformers\/issues\/7535](https:\/\/github.com\/huggingface\/transformers\/issues\/7535). @jplu instructed me to open here, since according to [this](https:\/\/github.com\/huggingface\/transformers\/issues\/7535#issuecomment-702778885) evidence, the problem is from datasets.\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/705\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/705\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/704","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/704\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/704\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/704\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/704","id":713572556,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2ODY2NTQ0","number":704,"title":"Fix remote tests for new datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-02T12:08:04Z","updated_at":"2020-10-02T12:12:02Z","closed_at":"2020-10-02T12:12:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/704","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/704","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/704.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/704.patch","merged_at":"2020-10-02T12:12:01Z"},"body":"When adding a new dataset, the remote tests fail because they try to get the new dataset from the master branch (i.e., where the dataset doesn't exist yet)\r\nTo fix that I reverted to the use of the HF API that fetch the available datasets on S3 that is synced with the master 
branch","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/704\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/704\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/703","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/703\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/703\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/703\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/703","id":713559718,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2ODU1OTQ5","number":703,"title":"Add hotpot QA","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-10-02T11:44:28Z","updated_at":"2020-10-02T12:54:41Z","closed_at":"2020-10-02T12:54:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/703","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/703","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/703.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/703.patch","merged_at":"2020-10-02T12:54:40Z"},"body":"Added the [HotpotQA](https:\/\/github.com\/hotpotqa\/hotpot) multi-hop question answering dataset.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/703\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/703\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/702","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/702\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/702\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/702\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/702","id":713499628,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2ODA3Mjg4","number":702,"title":"Complete rouge kwargs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-02T09:59:01Z","updated_at":"2020-10-02T10:11:04Z","closed_at":"2020-10-02T10:11:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/702","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/702","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/702.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/702.patch","merged_at":"2020-10-02T10:11:03Z"},"body":"In #701 we noticed that some kwargs were missing for rouge","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/702\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/702\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/701","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/701\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/701\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/701\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/701","id":713485757,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2Nzk2MTQ1","number":701,"title":"Add rouge 2 and rouge Lsum to rouge metric 
outputs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-02T09:35:46Z","updated_at":"2020-10-02T09:55:14Z","closed_at":"2020-10-02T09:52:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/701","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/701","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/701.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/701.patch","merged_at":"2020-10-02T09:52:18Z"},"body":"Continuation of #700 \r\n\r\nRouge 2 and Rouge Lsum were missing in Rouge's outputs.\r\nRouge Lsum is also useful to evaluate Rouge L for sentences with `\\n`\r\n\r\nFix #617 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/701\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/701\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/700","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/700\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/700\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/700\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/700","id":713450295,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2NzY3MTMz","number":700,"title":"Add rouge-2 in rouge_types for metric 
calculation","user":{"login":"Shashi456","id":18056781,"node_id":"MDQ6VXNlcjE4MDU2Nzgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18056781?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Shashi456","html_url":"https:\/\/github.com\/Shashi456","followers_url":"https:\/\/api.github.com\/users\/Shashi456\/followers","following_url":"https:\/\/api.github.com\/users\/Shashi456\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Shashi456\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Shashi456\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Shashi456\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Shashi456\/orgs","repos_url":"https:\/\/api.github.com\/users\/Shashi456\/repos","events_url":"https:\/\/api.github.com\/users\/Shashi456\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Shashi456\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":13,"created_at":"2020-10-02T08:36:45Z","updated_at":"2020-10-02T11:08:49Z","closed_at":"2020-10-02T09:59:05Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/700","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/700","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/700.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/700.patch","merged_at":null},"body":"The description of the ROUGE metric says, \r\n```\r\n_KWARGS_DESCRIPTION = \"\"\"\r\nCalculates average rouge scores for a list of hypotheses and references\r\nArgs:\r\n predictions: list of predictions to score. Each predictions\r\n should be a string with tokens separated by spaces.\r\n references: list of reference for each prediction. 
Each\r\n reference should be a string with tokens separated by spaces.\r\nReturns:\r\n rouge1: rouge_1 f1,\r\n rouge2: rouge_2 f1,\r\n rougeL: rouge_l f1,\r\n rougeLsum: rouge_l precision\r\n\"\"\"\r\n```\r\n\r\nbut the `rouge_types` argument defaults to `rouge_types = [\"rouge1\", \"rougeL\"]`, this PR updates and add `rouge2` to the list so as to reflect the description card.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/700\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/700\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/699","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/699\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/699\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/699\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/699","id":713395642,"node_id":"MDU6SXNzdWU3MTMzOTU2NDI=","number":699,"title":"XNLI dataset is not loading ","user":{"login":"imadarsh1001","id":14936525,"node_id":"MDQ6VXNlcjE0OTM2NTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14936525?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/imadarsh1001","html_url":"https:\/\/github.com\/imadarsh1001","followers_url":"https:\/\/api.github.com\/users\/imadarsh1001\/followers","following_url":"https:\/\/api.github.com\/users\/imadarsh1001\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/imadarsh1001\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/imadarsh1001\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/imadarsh1001\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/imadarsh1001\/orgs","repos_url":"https:\/\/api.github.com\/users\/imadarsh1001\/repos","events_url":"https:\/\/api.github.com\/users\/imadarsh1001\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/imadarsh1001\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-10-02T06:53:16Z","updated_at":"2020-10-03T17:45:52Z","closed_at":"2020-10-03T17:43:37Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`dataset = datasets.load_dataset(path='xnli')`\r\n\r\nshowing below error \r\n```\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/nlp\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 36 if len(bad_urls) > 0:\r\n 37 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 38 raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 39 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 40 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/www.nyu.edu\/projects\/bowman\/xnli\/XNLI-1.0.zip']\r\n```\r\n\r\nI think URL is now changed to 
\"https:\/\/cims.nyu.edu\/~sbowman\/xnli\/XNLI-MT-1.0.zip\"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/699\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/699\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/697","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/697\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/697\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/697\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/697","id":712979029,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2MzczNDU5","number":697,"title":"Update README.md","user":{"login":"bishug","id":71011306,"node_id":"MDQ6VXNlcjcxMDExMzA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/71011306?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bishug","html_url":"https:\/\/github.com\/bishug","followers_url":"https:\/\/api.github.com\/users\/bishug\/followers","following_url":"https:\/\/api.github.com\/users\/bishug\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bishug\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bishug\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bishug\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bishug\/orgs","repos_url":"https:\/\/api.github.com\/users\/bishug\/repos","events_url":"https:\/\/api.github.com\/users\/bishug\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bishug\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-01T16:02:42Z","updated_at":"2020-10-01T16:12:00Z","closed_at":"2020-10-01T16:12:00Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/697","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/697","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/697.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/697.patch","merged_at":null},"body":"Hey I was just telling my subscribers to check out your repositories \r\nThank you","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/697\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/697\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/696","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/696\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/696\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/696\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/696","id":712942977,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2MzQzMjEy","number":696,"title":"Elasticsearch index docs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-01T15:18:58Z","updated_at":"2020-10-02T07:48:19Z","closed_at":"2020-10-02T07:48:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/696","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/696","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/696.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/696.patch","merged_at":"2020-10-02T07:48:18Z"},"body":"I added the docs for ES indexes.\r\n\r\nI also added a `load_elasticsearch_index` method to load an index that has already been built.\r\n\r\nI checked the tests for the ES index and we have tests that mock ElasticSearch.\r\nI think this is good for now but at some point it would be cool to have an end-to-end test with a real ES running.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/696\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/696\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/695","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/695\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/695\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/695\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/695","id":712843949,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2MjU5NTM0","number":695,"title":"Update XNLI download 
link","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-01T13:27:22Z","updated_at":"2020-10-01T14:01:15Z","closed_at":"2020-10-01T14:01:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/695","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/695","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/695.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/695.patch","merged_at":"2020-10-01T14:01:14Z"},"body":"The old link isn't working anymore. I updated it with the new official link.\r\nFix #690 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/695\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/695\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/694","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/694\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/694\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/694\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/694","id":712827751,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2MjQ1NzU0","number":694,"title":"Use GitHub instead of aws in remote dataset 
tests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-01T13:07:50Z","updated_at":"2020-10-02T07:47:28Z","closed_at":"2020-10-02T07:47:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/694","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/694","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/694.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/694.patch","merged_at":"2020-10-02T07:47:26Z"},"body":"Recently we switched from aws s3 to github to download dataset scripts.\r\nHowever in the tests, the dummy data were still downloaded from s3.\r\nSo I changed that to download them from github instead, in the MockDownloadManager.\r\n\r\nMoreover I noticed that `anli`'s dummy data were quite heavy (18MB compressed, i.e. 
the entire dataset) so I replaced them with dummy data with few examples.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/694\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/694\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/693","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/693\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/693\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/693\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/693","id":712822200,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2MjQxMjUw","number":693,"title":"Rachel ker add dataset\/mlsum","user":{"login":"pdhg","id":32742136,"node_id":"MDQ6VXNlcjMyNzQyMTM2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32742136?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pdhg","html_url":"https:\/\/github.com\/pdhg","followers_url":"https:\/\/api.github.com\/users\/pdhg\/followers","following_url":"https:\/\/api.github.com\/users\/pdhg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pdhg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pdhg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pdhg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pdhg\/orgs","repos_url":"https:\/\/api.github.com\/users\/pdhg\/repos","events_url":"https:\/\/api.github.com\/users\/pdhg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pdhg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-10-01T13:01:10Z","updated_at":"2020-10-01T17:01:13Z","closed_at":"2020-10-01T17:01:13Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/693","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/693","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/693.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/693.patch","merged_at":null},"body":".","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/693\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/693\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/692","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/692\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/692\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/692\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/692","id":712818968,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk2MjM4NzIw","number":692,"title":"Update 
README.md","user":{"login":"mayank1897","id":62796466,"node_id":"MDQ6VXNlcjYyNzk2NDY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/62796466?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mayank1897","html_url":"https:\/\/github.com\/mayank1897","followers_url":"https:\/\/api.github.com\/users\/mayank1897\/followers","following_url":"https:\/\/api.github.com\/users\/mayank1897\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mayank1897\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mayank1897\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mayank1897\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mayank1897\/orgs","repos_url":"https:\/\/api.github.com\/users\/mayank1897\/repos","events_url":"https:\/\/api.github.com\/users\/mayank1897\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mayank1897\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-10-01T12:57:22Z","updated_at":"2020-10-02T11:01:59Z","closed_at":"2020-10-02T11:01:59Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/692","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/692","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/692.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/692.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/692\/reactions","total_count":6,"+1":0,"-1":4,"laugh":0,"hooray":0,"confused":2,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/692\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/691","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/691\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/691\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/691\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/691","id":712389499,"node_id":"MDU6SXNzdWU3MTIzODk0OTk=","number":691,"title":"Add UI filter to filter datasets based on 
task","user":{"login":"praateekmahajan","id":7589415,"node_id":"MDQ6VXNlcjc1ODk0MTU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7589415?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/praateekmahajan","html_url":"https:\/\/github.com\/praateekmahajan","followers_url":"https:\/\/api.github.com\/users\/praateekmahajan\/followers","following_url":"https:\/\/api.github.com\/users\/praateekmahajan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/praateekmahajan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/praateekmahajan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/praateekmahajan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/praateekmahajan\/orgs","repos_url":"https:\/\/api.github.com\/users\/praateekmahajan\/repos","events_url":"https:\/\/api.github.com\/users\/praateekmahajan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/praateekmahajan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-10-01T00:56:18Z","updated_at":"2020-10-27T15:24:30Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"This is great work, so huge shoutout to contributors and huggingface.\r\n\r\nThe [\/nlp\/viewer](https:\/\/huggingface.co\/nlp\/viewer\/) is great and the [\/datasets](https:\/\/huggingface.co\/datasets) page is great. 
I was wondering if in both or either places we can have a filter that selects if a dataset is good for the following tasks (non exhaustive list)\r\n\r\n- Classification\r\n\t- Multi label\r\n\t- Multi class\r\n- Q&A\r\n- Summarization\r\n- Translation\r\n\r\nI believe this feature might have some value, for folks trying to find datasets for a particular task, and then testing their model capabilities.\r\n\r\nThank you :) ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/691\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/691\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/690","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/690\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/690\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/690\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/690","id":712150321,"node_id":"MDU6SXNzdWU3MTIxNTAzMjE=","number":690,"title":"XNLI dataset: NonMatchingChecksumError","user":{"login":"xiey1","id":13307358,"node_id":"MDQ6VXNlcjEzMzA3MzU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13307358?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/xiey1","html_url":"https:\/\/github.com\/xiey1","followers_url":"https:\/\/api.github.com\/users\/xiey1\/followers","following_url":"https:\/\/api.github.com\/users\/xiey1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/xiey1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/xiey1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/xiey1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/xiey1\/orgs","repos_url":"https:\/\/api.github.com\/users\/xiey1\/repos","events_url":"https:\/\/api.github.com\/users\/xiey1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/xiey1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-09-30T17:50:03Z","updated_at":"2020-10-01T17:15:08Z","closed_at":"2020-10-01T14:01:14Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI tried to download \"xnli\" dataset in colab using \r\n`xnli = load_dataset(path='xnli')`\r\nbut got 'NonMatchingChecksumError' error\r\n\r\n`NonMatchingChecksumError Traceback (most recent call last)\r\n in ()\r\n----> 1 xnli = load_dataset(path='xnli')\r\n\r\n3 frames\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 37 if len(bad_urls) > 0:\r\n 38 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 39 raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 40 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 41 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/www.nyu.edu\/projects\/bowman\/xnli\/XNLI-1.0.zip']`\r\n\r\nThe same code worked well several days ago in colab but stopped 
working now. Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/690\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/690\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/689","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/689\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/689\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/689\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/689","id":712095262,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk1NjMzNjMy","number":689,"title":"Switch to pandas reader for text dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-30T16:28:12Z","updated_at":"2020-09-30T16:45:32Z","closed_at":"2020-09-30T16:45:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/689","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/689","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/689.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/689.patch","merged_at":"2020-09-30T16:45:31Z"},"body":"Following the discussion in #622 , it appears that there's no appropriate ways to use the payrrow csv reader to read text files because of the separator.\r\n\r\nIn this PR I switched to pandas to read the file.\r\n\r\nMoreover pandas allows to read the file by chunk, which means that you can build the arrow dataset from a text file that is bigger than RAM (we used to have to shard text files an mentioned in https:\/\/github.com\/huggingface\/datasets\/issues\/610#issuecomment-691672919)\r\n\r\nFrom a test that I did locally on a 1GB text file, the pyarrow reader used to run in 150ms while the new one takes 650ms (multithreading off for pyarrow). 
This is probably due to chunking since I am having the same speed difference by calling `read()` and calling `read(chunksize)` + `readline()` to read the text file.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/689\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/689\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/688","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/688\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/688\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/688\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/688","id":711804828,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk1MzkwMTc1","number":688,"title":"Disable tokenizers parallelism in multiprocessed map","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-30T09:53:34Z","updated_at":"2020-10-01T08:45:46Z","closed_at":"2020-10-01T08:45:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/688","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/688","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/688.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/688.patch","merged_at":"2020-10-01T08:45:45Z"},"body":"It was reported in #620 that using multiprocessing with a tokenizers shows this message:\r\n```\r\nThe current process just got forked. Disabling parallelism to avoid deadlocks...\r\nTo disable this warning, please explicitly set TOKENIZERS_PARALLELISM=(true | false)\r\n```\r\nThis message is shown when TOKENIZERS_PARALLELISM is unset.\r\nMoreover if it is set to `true`, then the program just hangs.\r\n\r\nTo hide the message (if TOKENIZERS_PARALLELISM is unset) and avoid hanging (if TOKENIZERS_PARALLELISM is `true`), then I set TOKENIZERS_PARALLELISM to `false` when forking the process. 
After forking is gets back to its original value.\r\n\r\nAlso I added a warning if TOKENIZERS_PARALLELISM was `true` and is set to `false`:\r\n```\r\nSetting TOKENIZERS_PARALLELISM=false for forked processes.\r\n```\r\n\r\ncc @n1t0 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/688\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/688\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/687","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/687\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/687\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/687\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/687","id":711664810,"node_id":"MDU6SXNzdWU3MTE2NjQ4MTA=","number":687,"title":"`ArrowInvalid` occurs while running `Dataset.map()` function","user":{"login":"peinan","id":5601012,"node_id":"MDQ6VXNlcjU2MDEwMTI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5601012?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/peinan","html_url":"https:\/\/github.com\/peinan","followers_url":"https:\/\/api.github.com\/users\/peinan\/followers","following_url":"https:\/\/api.github.com\/users\/peinan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/peinan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/peinan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/peinan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/peinan\/orgs","repos_url":"https:\/\/api.github.com\/users\/peinan\/repos","events_url":"https:\/\/api.github.com\/users\/peinan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/peinan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-30T06:16:50Z","updated_at":"2020-09-30T09:53:03Z","closed_at":"2020-09-30T09:53:03Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It seems to fail to process the final batch. 
This [colab](https:\/\/colab.research.google.com\/drive\/1_byLZRHwGP13PHMkJWo62Wp50S_Z2HMD?usp=sharing) can reproduce the error.\r\n\r\nCode:\r\n\r\n```python\r\n# train_ds = Dataset(features: {\r\n# 'title': Value(dtype='string', id=None), \r\n# 'score': Value(dtype='float64', id=None)\r\n# }, num_rows: 99999)\r\n\r\n# suggested in #665 \r\nclass PicklableTokenizer(BertJapaneseTokenizer):\r\n def __getstate__(self):\r\n state = dict(self.__dict__)\r\n state['do_lower_case'] = self.word_tokenizer.do_lower_case\r\n state['never_split'] = self.word_tokenizer.never_split\r\n del state['word_tokenizer']\r\n return state\r\n \r\n def __setstate(self):\r\n do_lower_case = state.pop('do_lower_case')\r\n never_split = state.pop('never_split')\r\n self.__dict__ = state\r\n self.word_tokenizer = MecabTokenizer(\r\n do_lower_case=do_lower_case, never_split=never_split\r\n )\r\n\r\nt = PicklableTokenizer.from_pretrained('bert-base-japanese-whole-word-masking')\r\n\r\nencoded = train_ds.map(\r\n lambda examples: {'tokens': t.encode(examples['title'], max_length=1000)}, batched=True, batch_size=1000\r\n)\r\n```\r\n\r\nError Message:\r\n\r\n```\r\n 99% 99\/100 [00:22<00:00, 39.07ba\/s]\r\n---------------------------------------------------------------------------\r\nArrowInvalid Traceback (most recent call last)\r\n in \r\n\r\n\/usr\/local\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint)\r\n 1242 fn_kwargs=fn_kwargs,\r\n 1243 new_fingerprint=new_fingerprint,\r\n-> 1244 update_data=update_data,\r\n 1245 )\r\n 1246 else:\r\n\r\n\/usr\/local\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 151 \"output_all_columns\": self._output_all_columns,\r\n 152 }\r\n--> 153 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 154 if new_format[\"columns\"] is not None:\r\n 155 new_format[\"columns\"] = list(set(new_format[\"columns\"]) & set(out.column_names))\r\n\r\n\/usr\/local\/lib\/python3.6\/site-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 161 # Call actual function\r\n 162 \r\n--> 163 out = func(self, *args, **kwargs)\r\n 164 \r\n 165 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n\/usr\/local\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, update_data)\r\n 1496 if update_data:\r\n 1497 batch = cast_to_python_objects(batch)\r\n-> 1498 writer.write_batch(batch)\r\n 1499 if update_data:\r\n 1500 writer.finalize() # close_stream=bool(buf_writer is None)) # We only close if we are writing in a file\r\n\r\n\/usr\/local\/lib\/python3.6\/site-packages\/datasets\/arrow_writer.py in write_batch(self, batch_examples, writer_batch_size)\r\n 271 typed_sequence = TypedSequence(batch_examples[col], type=col_type, try_type=col_try_type)\r\n 272 typed_sequence_examples[col] = typed_sequence\r\n--> 273 pa_table = pa.Table.from_pydict(typed_sequence_examples)\r\n 274 self.write_table(pa_table)\r\n 275 
\r\n\r\n\/usr\/local\/lib\/python3.6\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.from_pydict()\r\n\r\n\/usr\/local\/lib\/python3.6\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.from_arrays()\r\n\r\n\/usr\/local\/lib\/python3.6\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.validate()\r\n\r\n\/usr\/local\/lib\/python3.6\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowInvalid: Column 4 named tokens expected length 999 but got length 1000\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/687\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/687\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/686","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/686\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/686\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/686\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/686","id":711385739,"node_id":"MDU6SXNzdWU3MTEzODU3Mzk=","number":686,"title":"Dataset browser url is still https:\/\/huggingface.co\/nlp\/viewer\/","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-29T19:21:52Z","updated_at":"2021-01-08T18:29:26Z","closed_at":"2021-01-08T18:29:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Might be worth updating to https:\/\/huggingface.co\/datasets\/viewer\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/686\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/686\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/685","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/685\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/685\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/685\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/685","id":711182185,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk0ODg1NjIz","number":685,"title":"Add features parameter to CSV","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-29T14:43:36Z","updated_at":"2020-09-30T08:39:56Z","closed_at":"2020-09-30T08:39:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/685","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/685","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/685.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/685.patch","merged_at":"2020-09-30T08:39:54Z"},"body":"Add support for the `features` parameter when loading a csv dataset:\r\n\r\n```python\r\nfrom datasets import load_dataset, Features\r\n\r\nfeatures = Features({...})\r\ncsv_dataset = load_dataset(\"csv\", data_files=[\"path\/to\/my\/file.csv\"], features=features)\r\n```\r\n\r\nI added tests to make sure that it is also compatible with the caching system\r\n\r\nFix #623 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/685\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/685\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/684","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/684\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/684\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/684\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/684","id":711080947,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk0ODA2NjE1","number":684,"title":"Fix column order issue in 
cast","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-29T12:49:13Z","updated_at":"2020-09-29T15:56:46Z","closed_at":"2020-09-29T15:56:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/684","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/684","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/684.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/684.patch","merged_at":"2020-09-29T15:56:45Z"},"body":"Previously, the order of the columns in the features passes to `cast_` mattered.\r\nHowever even though features passed to `cast_` had the same order as the dataset features, it could fail because the schema that was built was always in alphabetical order.\r\nThis issue was reported by @lewtun in #623 \r\n\r\nTo fix that I fixed the schema to follow the order of the arrow table columns.\r\nI also added the possibility to give features that are not ordered the same way as the dataset features.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/684\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/684\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/683","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/683\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/683\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/683\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/683","id":710942704,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk0NzAwNzY1","number":683,"title":"Fix wrong delimiter in text 
dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-29T09:43:24Z","updated_at":"2021-05-05T18:24:31Z","closed_at":"2020-09-29T09:44:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/683","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/683","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/683.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/683.patch","merged_at":null},"body":"The delimiter is set to the bell character as it is used nowhere is text files usually.\r\nHowever in the text dataset the delimiter was set to `\\b` which is backspace in python, while the bell character is `\\a`.\r\nI replace \\b by \\a\r\n\r\nHopefully it fixes issues mentioned by some users in #622 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/683\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/683\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/682","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/682\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/682\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/682\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/682","id":710325399,"node_id":"MDExOlB1bGxSZXF1ZXN0NDk0MTkzMzEw","number":682,"title":"Update navbar chapter titles 
color","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-28T14:35:17Z","updated_at":"2020-09-28T17:30:13Z","closed_at":"2020-09-28T17:30:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/682","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/682","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/682.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/682.patch","merged_at":"2020-09-28T17:30:12Z"},"body":"Consistency with the color change that was done in transformers at https:\/\/github.com\/huggingface\/transformers\/pull\/7423\r\nIt makes the background-color of the chapter titles in the docs navbar darker, to differentiate them from the inner sections.\r\n\r\nsee changes [here](https:\/\/691-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/index.html)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/682\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/682\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/681","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/681\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/681\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/681\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/681","id":710075721,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkzOTkwMjEz","number":681,"title":"Adding missing @property (+2 small flake8 
fixes).","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-28T08:53:53Z","updated_at":"2020-09-28T10:26:13Z","closed_at":"2020-09-28T10:26:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/681","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/681","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/681.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/681.patch","merged_at":"2020-09-28T10:26:09Z"},"body":"Fixes #678","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/681\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/681\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/680","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/680\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/680\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/680\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/680","id":710066138,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkzOTgyMjY4","number":680,"title":"Fix bug related to boolean in GAP 
dataset.","user":{"login":"otakumesi","id":14996977,"node_id":"MDQ6VXNlcjE0OTk2OTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14996977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/otakumesi","html_url":"https:\/\/github.com\/otakumesi","followers_url":"https:\/\/api.github.com\/users\/otakumesi\/followers","following_url":"https:\/\/api.github.com\/users\/otakumesi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/otakumesi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/otakumesi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/otakumesi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/otakumesi\/orgs","repos_url":"https:\/\/api.github.com\/users\/otakumesi\/repos","events_url":"https:\/\/api.github.com\/users\/otakumesi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/otakumesi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-28T08:39:39Z","updated_at":"2020-09-29T15:54:47Z","closed_at":"2020-09-29T15:54:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/680","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/680","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/680.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/680.patch","merged_at":"2020-09-29T15:54:47Z"},"body":"### Why I did\r\nThe value in `row[\"A-coref\"]` and `row[\"B-coref\"]` is `'TRUE'` or `'FALSE'`.\r\nThis type is `string`, then `bool('FALSE')` is equal to `True` in Python.\r\nSo, both rows are transformed into `True` now.\r\n\r\nSo, I modified this problem.\r\n\r\n### What I did\r\nI modified `bool(row[\"A-coref\"])` and `bool(row[\"B-coref\"])` to `row[\"A-coref\"] == \"TRUE\"` and `row[\"B-coref\"] == \"TRUE\"`.\r\n\r\nThank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/680\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/680\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/679","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/679\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/679\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/679\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/679","id":710065838,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkzOTgyMDMx","number":679,"title":"Fix negative ids when slicing with an 
array","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-28T08:39:08Z","updated_at":"2020-09-28T14:42:20Z","closed_at":"2020-09-28T14:42:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/679","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/679","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/679.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/679.patch","merged_at":"2020-09-28T14:42:19Z"},"body":"```python\r\nfrom datasets import Dataset\r\n\r\nd = ds.Dataset.from_dict({\"a\": range(10)})\r\nprint(d[[0, -1]])\r\n# OverflowError\r\n```\r\n\r\nraises an error because of the negative id.\r\n\r\nThis PR fixes that.\r\nFix #668 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/679\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/679\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/678","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/678\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/678\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/678\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/678","id":710060497,"node_id":"MDU6SXNzdWU3MTAwNjA0OTc=","number":678,"title":"The download instructions for c4 datasets are not contained in the error 
message","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-28T08:30:54Z","updated_at":"2020-09-28T10:26:09Z","closed_at":"2020-09-28T10:26:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The manual download instructions are not clear \r\n```The dataset c4 with config en requires manual data. \r\n Please follow the manual download instructions: >. \r\n Manual data can be loaded with `datasets.load_dataset(c4, data_dir='')\r\n```\r\n\r\nEither `@property` could be added to C4.manual_download_instrcutions (or make it a real property), or the manual_download_instructions function needs to be called I think.\r\n\r\nLet me know if you want a PR for this, but I'm not sure which possible fix is the correct one.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/678\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/678\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/677","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/677\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/677\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/677\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/677","id":710055239,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkzOTczNDE3","number":677,"title":"Move cache dir root creation in builder's 
init","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-28T08:22:46Z","updated_at":"2020-09-28T14:42:43Z","closed_at":"2020-09-28T14:42:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/677","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/677","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/677.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/677.patch","merged_at":"2020-09-28T14:42:42Z"},"body":"We use lock files in the builder initialization but sometimes the cache directory where they're supposed to be was not created. To fix that I moved the builder's cache dir root creation in the builder's init.\r\n\r\nFix #671 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/677\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/677\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/676","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/676\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/676\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/676\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/676","id":710014319,"node_id":"MDU6SXNzdWU3MTAwMTQzMTk=","number":676,"title":"train_test_split returns empty dataset 
item","user":{"login":"mojave-pku","id":26648528,"node_id":"MDQ6VXNlcjI2NjQ4NTI4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26648528?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mojave-pku","html_url":"https:\/\/github.com\/mojave-pku","followers_url":"https:\/\/api.github.com\/users\/mojave-pku\/followers","following_url":"https:\/\/api.github.com\/users\/mojave-pku\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mojave-pku\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mojave-pku\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mojave-pku\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mojave-pku\/orgs","repos_url":"https:\/\/api.github.com\/users\/mojave-pku\/repos","events_url":"https:\/\/api.github.com\/users\/mojave-pku\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mojave-pku\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-09-28T07:19:33Z","updated_at":"2020-10-07T13:46:33Z","closed_at":"2020-10-07T13:38:06Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I try to split my dataset by `train_test_split`, but after that the item in `train` and `test` `Dataset` is empty.\r\nThe codes:\r\n```\r\nyelp_data = datasets.load_from_disk('\/home\/ssd4\/huanglianzhe\/test_yelp')\r\n print(yelp_data[0])\r\n yelp_data = yelp_data.train_test_split(test_size=0.1)\r\n print(yelp_data)\r\n print(yelp_data['test'])\r\n print(yelp_data['test'][0])\r\n```\r\nThe outputs:\r\n```\r\n{'stars': 2.0, 'text': 'xxxx'}\r\nLoading cached split indices for dataset at \/home\/ssd4\/huanglianzhe\/test_yelp\/cache-f9b22d8b9d5a7346.arrow and \/home\/ssd4\/huanglianzhe\/test_yelp\/cache-4aa26fa4005059d1.arrow\r\nDatasetDict({'train': Dataset(features: {'stars': Value(dtype='float64', id=None), 'text': Value(dtype='string', id=None)}, num_rows: 7219009), 'test': Dataset(features: {'stars': Value(dtype='float64', id=None), 'text': Value(dtype='string', id=None)}, num_rows: 802113)})\r\nDataset(features: {'stars': Value(dtype='float64', id=None), 'text': Value(dtype='string', id=None)}, num_rows: 802113)\r\n{} # yelp_data['test'][0] is empty\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/676\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/676\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/675","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/675\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/675\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/675\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/675","id":709818725,"node_id":"MDU6SXNzdWU3MDk4MTg3MjU=","number":675,"title":"Add custom dataset to 
NLP?","user":{"login":"timpal0l","id":6556710,"node_id":"MDQ6VXNlcjY1NTY3MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6556710?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timpal0l","html_url":"https:\/\/github.com\/timpal0l","followers_url":"https:\/\/api.github.com\/users\/timpal0l\/followers","following_url":"https:\/\/api.github.com\/users\/timpal0l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timpal0l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timpal0l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timpal0l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timpal0l\/orgs","repos_url":"https:\/\/api.github.com\/users\/timpal0l\/repos","events_url":"https:\/\/api.github.com\/users\/timpal0l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timpal0l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-27T21:22:50Z","updated_at":"2020-10-20T09:08:49Z","closed_at":"2020-10-20T09:08:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Is it possible to add a custom dataset such as a .csv to the NLP library?\r\n\r\nThanks.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/675\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/675\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/674","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/674\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/674\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/674\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/674","id":709661006,"node_id":"MDU6SXNzdWU3MDk2NjEwMDY=","number":674,"title":"load_dataset() won't download in 
Windows","user":{"login":"ThisDavehead","id":34422661,"node_id":"MDQ6VXNlcjM0NDIyNjYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34422661?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ThisDavehead","html_url":"https:\/\/github.com\/ThisDavehead","followers_url":"https:\/\/api.github.com\/users\/ThisDavehead\/followers","following_url":"https:\/\/api.github.com\/users\/ThisDavehead\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ThisDavehead\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ThisDavehead\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ThisDavehead\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ThisDavehead\/orgs","repos_url":"https:\/\/api.github.com\/users\/ThisDavehead\/repos","events_url":"https:\/\/api.github.com\/users\/ThisDavehead\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ThisDavehead\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-09-27T03:56:25Z","updated_at":"2020-10-05T08:28:18Z","closed_at":"2020-10-05T08:28:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I don't know if this is just me or Windows. Maybe other Windows users can chime in if they don't have this problem. I've been trying to get some of the tutorials working on Windows, but when I use the load_dataset() function, it just stalls and the script keeps running indefinitely without downloading anything. I've waited upwards of 18 hours to download the 'multi-news' dataset (which isn't very big), and still nothing. I've tried running it through different IDE's and the command line, but it had the same behavior. I've also tried it with all virus and malware protection turned off. I've made sure python and all IDE's are exceptions to the firewall and all the requisite permissions are enabled.\r\n\r\nAdditionally, I checked to see if other packages could download content such as an nltk corpus, and they could. I've also run the same script using Ubuntu and it downloaded fine (and quickly). 
When I copied the downloaded datasets from my Ubuntu drive to my Windows .cache folder it worked fine by reusing the already-downloaded dataset, but it's cumbersome to do that for every dataset I want to try in my Windows environment.\r\n\r\nCould this be a bug, or is there something I'm doing wrong or not thinking of?\r\n\r\nThanks.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/674\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/674\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/673","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/673\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/673\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/673\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/673","id":709603989,"node_id":"MDU6SXNzdWU3MDk2MDM5ODk=","number":673,"title":"blog_authorship_corpus crashed","user":{"login":"Moshiii","id":7553188,"node_id":"MDQ6VXNlcjc1NTMxODg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7553188?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Moshiii","html_url":"https:\/\/github.com\/Moshiii","followers_url":"https:\/\/api.github.com\/users\/Moshiii\/followers","following_url":"https:\/\/api.github.com\/users\/Moshiii\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Moshiii\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Moshiii\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Moshiii\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Moshiii\/orgs","repos_url":"https:\/\/api.github.com\/users\/Moshiii\/repos","events_url":"https:\/\/api.github.com\/users\/Moshiii\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Moshiii\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-26T20:15:28Z","updated_at":"2020-09-28T08:04:50Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"This is just to report that When I pick blog_authorship_corpus in \r\nhttps:\/\/huggingface.co\/nlp\/viewer\/?dataset=blog_authorship_corpus\r\nI get this:\r\n![image](https:\/\/user-images.githubusercontent.com\/7553188\/94349542-4364f300-0013-11eb-897d-b25660a449f0.png)\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/673\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/673\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/672","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/672\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/672\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/672\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/672","id":709575527,"node_id":"MDU6SXNzdWU3MDk1NzU1Mjc=","number":672,"title":"Questions about XSUM ","user":{"login":"danyaljj","id":2441454,"node_id":"MDQ6VXNlcjI0NDE0NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2441454?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danyaljj","html_url":"https:\/\/github.com\/danyaljj","followers_url":"https:\/\/api.github.com\/users\/danyaljj\/followers","following_url":"https:\/\/api.github.com\/users\/danyaljj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danyaljj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danyaljj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danyaljj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danyaljj\/orgs","repos_url":"https:\/\/api.github.com\/users\/danyaljj\/repos","events_url":"https:\/\/api.github.com\/users\/danyaljj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danyaljj\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":13,"created_at":"2020-09-26T17:16:24Z","updated_at":"2020-10-20T09:16:07Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi there \u270b \r\n\r\nI'm looking into your `xsum` dataset and I have several questions on that. \r\nSo here is how I loaded the data: \r\n```\r\n>>> data = datasets.load_dataset('xsum', version='1.0.1')\r\n>>> data['train']\r\nDataset(features: {'document': Value(dtype='string', id=None), 'summary': Value(dtype='string', id=None)}, num_rows: 204017)\r\n>>> data['test']\r\nDataset(features: {'document': Value(dtype='string', id=None), 'summary': Value(dtype='string', id=None)}, num_rows: 11333)\r\n```\r\n\r\nThe first issue is, the instance counts don\u2019t match what I see on [the dataset's website](https:\/\/github.com\/EdinburghNLP\/XSum\/tree\/master\/XSum-Dataset#what-builds-the-xsum-dataset) (11,333 vs 11,334 for test set; 204,017 vs 204,045 for training set)\r\n```\r\n \u2026 training (90%, 204,045), validation (5%, 11,332), and test (5%, 11,334) set.\r\n```\r\nAny thoughts why? Perhaps @mariamabarham could help here, since she recently had a PR on this dataaset https:\/\/github.com\/huggingface\/datasets\/pull\/289 (reviewed by @patrickvonplaten) \r\n\r\nAnother issue is that the instances don't seem to have IDs. The original datasets provides IDs for the instances: https:\/\/github.com\/EdinburghNLP\/XSum\/blob\/master\/XSum-Dataset\/XSum-TRAINING-DEV-TEST-SPLIT-90-5-5.json but to be able to use them, the dataset sizes need to match. 
\r\n\r\nCC @jbragg \r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/672\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/672\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/671","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/671\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/671\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/671\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/671","id":709093151,"node_id":"MDU6SXNzdWU3MDkwOTMxNTE=","number":671,"title":"[BUG] No such file or directory","user":{"login":"jbragg","id":2238344,"node_id":"MDQ6VXNlcjIyMzgzNDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2238344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jbragg","html_url":"https:\/\/github.com\/jbragg","followers_url":"https:\/\/api.github.com\/users\/jbragg\/followers","following_url":"https:\/\/api.github.com\/users\/jbragg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jbragg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jbragg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jbragg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jbragg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jbragg\/repos","events_url":"https:\/\/api.github.com\/users\/jbragg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jbragg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-25T16:38:54Z","updated_at":"2020-09-28T14:42:42Z","closed_at":"2020-09-28T14:42:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"This happens when both\r\n1. Huggingface datasets cache dir does not exist\r\n2. 
Try to load a local dataset script\r\n\r\nbuilder.py throws an error when trying to create a filelock in a directory (cache\/datasets) that does not exist\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/builder.py#L177\r\n\r\nTested on v1.0.2\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/671\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/671\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/670","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/670\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/670\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/670\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/670","id":709061231,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkzMTc4OTQw","number":670,"title":"Fix SQuAD metric kwargs description","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-25T16:08:57Z","updated_at":"2020-09-29T15:57:39Z","closed_at":"2020-09-29T15:57:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/670","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/670","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/670.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/670.patch","merged_at":"2020-09-29T15:57:37Z"},"body":"The `answer_start` field was missing in the kwargs docstring.\r\n\r\nThis should fix #657 \r\n\r\nFYI another fix was proposed by @tshrjn in #658 and suggests to remove this field.\r\nHowever IMO `answer_start` is useful to match the squad dataset format for consistency, even though it is not used in the metric computation. I think it's better to keep it this way, so that you can just give references=squad[\"answers\"] to .compute(). 
\r\n\r\nLet me know what sounds the best for you\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/670\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/670\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/669","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/669\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/669\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/669\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/669","id":708857595,"node_id":"MDU6SXNzdWU3MDg4NTc1OTU=","number":669,"title":"How to skip a example when running dataset.map","user":{"login":"xixiaoyao","id":24541791,"node_id":"MDQ6VXNlcjI0NTQxNzkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24541791?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/xixiaoyao","html_url":"https:\/\/github.com\/xixiaoyao","followers_url":"https:\/\/api.github.com\/users\/xixiaoyao\/followers","following_url":"https:\/\/api.github.com\/users\/xixiaoyao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/xixiaoyao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/xixiaoyao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/xixiaoyao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/xixiaoyao\/orgs","repos_url":"https:\/\/api.github.com\/users\/xixiaoyao\/repos","events_url":"https:\/\/api.github.com\/users\/xixiaoyao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/xixiaoyao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-25T11:17:53Z","updated_at":"2020-10-05T16:28:13Z","closed_at":"2020-10-05T16:28:13Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"in processing func, I process examples and detect some invalid examples, which I did not want it to be added into train dataset. However I did not find how to skip this recognized invalid example when doing dataset.map. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/669\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/669\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/668","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/668\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/668\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/668\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/668","id":708310956,"node_id":"MDU6SXNzdWU3MDgzMTA5NTY=","number":668,"title":"OverflowError when slicing with an array containing negative ids","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-24T16:27:14Z","updated_at":"2020-09-28T14:42:19Z","closed_at":"2020-09-28T14:42:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```python\r\nfrom datasets import Dataset\r\n\r\nd = ds.Dataset.from_dict({\"a\": range(10)})\r\n\r\nprint(d[0])\r\n# {'a': 0}\r\n\r\nprint(d[-1])\r\n# {'a': 9}\r\n\r\nprint(d[[0, -1]])\r\n# OverflowError\r\n```\r\nresults in\r\n```\r\n---------------------------------------------------------------------------\r\nOverflowError Traceback (most recent call last)\r\n in \r\n----> 1 d[[0, -1]]\r\n\r\n~\/Desktop\/hf\/nlp\/src\/datasets\/arrow_dataset.py in __getitem__(self, key)\r\n 1070 format_columns=self._format_columns,\r\n 1071 output_all_columns=self._output_all_columns,\r\n-> 1072 format_kwargs=self._format_kwargs,\r\n 1073 )\r\n 1074 \r\n\r\n~\/Desktop\/hf\/nlp\/src\/datasets\/arrow_dataset.py in _getitem(self, key, format_type, format_columns, output_all_columns, format_kwargs)\r\n 1025 indices = key\r\n 1026 \r\n-> 1027 indices_array = pa.array([int(i) for i in indices], type=pa.uint64())\r\n 1028 \r\n 1029 # Check if we need to convert indices\r\n\r\n~\/.virtualenvs\/hf-datasets\/lib\/python3.7\/site-packages\/pyarrow\/array.pxi in pyarrow.lib.array()\r\n\r\n~\/.virtualenvs\/hf-datasets\/lib\/python3.7\/site-packages\/pyarrow\/array.pxi in pyarrow.lib._sequence_to_array()\r\n\r\nOverflowError: can't convert negative value to unsigned 
int\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/668\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/668\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/667","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/667\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/667\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/667\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/667","id":708258392,"node_id":"MDU6SXNzdWU3MDgyNTgzOTI=","number":667,"title":"Loss not decrease with Datasets and Transformers","user":{"login":"wangcongcong123","id":23032865,"node_id":"MDQ6VXNlcjIzMDMyODY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23032865?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/wangcongcong123","html_url":"https:\/\/github.com\/wangcongcong123","followers_url":"https:\/\/api.github.com\/users\/wangcongcong123\/followers","following_url":"https:\/\/api.github.com\/users\/wangcongcong123\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/wangcongcong123\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/wangcongcong123\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/wangcongcong123\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/wangcongcong123\/orgs","repos_url":"https:\/\/api.github.com\/users\/wangcongcong123\/repos","events_url":"https:\/\/api.github.com\/users\/wangcongcong123\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/wangcongcong123\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-24T15:14:43Z","updated_at":"2021-01-01T20:01:25Z","closed_at":"2021-01-01T20:01:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"HI,\r\n\r\nThe following script is used to fine-tune a BertForSequenceClassification model on SST2.\r\n\r\nThe script is adapted from [this colab](https:\/\/colab.research.google.com\/github\/huggingface\/datasets\/blob\/master\/notebooks\/Overview.ipynb) that presents an example of fine-tuning BertForQuestionAnswering using squad dataset. In that colab, loss works fine. When I adapt it to SST2, the loss fails to decrease as it should. 
I attach the adapted script below and appreciate anyone pointing out what I miss?\r\n\r\n```python\r\nimport torch\r\nfrom datasets import load_dataset\r\nfrom transformers import BertForSequenceClassification\r\nfrom transformers import BertTokenizerFast\r\n# Load our training dataset and tokenizer\r\ndataset = load_dataset(\"glue\", 'sst2')\r\ntokenizer = BertTokenizerFast.from_pretrained('bert-base-cased')\r\ndel dataset[\"test\"] # let's remove it in this demo\r\n\r\n# Tokenize our training dataset\r\ndef convert_to_features(example_batch):\r\n encodings = tokenizer(example_batch[\"sentence\"])\r\n encodings.update({\"labels\": example_batch[\"label\"]})\r\n return encodings\r\n\r\nencoded_dataset = dataset.map(convert_to_features, batched=True)\r\n# Format our dataset to outputs torch.Tensor to train a pytorch model\r\ncolumns = ['input_ids', 'token_type_ids', 'attention_mask', 'labels']\r\nencoded_dataset.set_format(type='torch', columns=columns)\r\n\r\n# Instantiate a PyTorch Dataloader around our dataset\r\n# Let's do dynamic batching (pad on the fly with our own collate_fn)\r\ndef collate_fn(examples):\r\n return tokenizer.pad(examples, return_tensors='pt')\r\n\r\ndataloader = torch.utils.data.DataLoader(encoded_dataset['train'], collate_fn=collate_fn, batch_size=8)\r\n# Now let's train our model\r\ndevice = 'cuda' if torch.cuda.is_available() else 'cpu'\r\n# Let's load a pretrained Bert model and a simple optimizer\r\nmodel = BertForSequenceClassification.from_pretrained('bert-base-cased', return_dict=True)\r\noptimizer = torch.optim.Adam(model.parameters(), lr=1e-5)\r\nmodel.train().to(device)\r\nfor i, batch in enumerate(dataloader):\r\n batch.to(device)\r\n outputs = model(**batch)\r\n loss = outputs.loss\r\n loss.backward()\r\n optimizer.step()\r\n model.zero_grad()\r\n print(f'Step {i} - loss: {loss:.3}')\r\n\r\n\r\n```\r\nIn case needed.\r\n\r\n- datasets == 1.0.2\r\n- transformers == 3.2.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/667\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/667\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/666","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/666\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/666\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/666\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/666","id":707608578,"node_id":"MDU6SXNzdWU3MDc2MDg1Nzg=","number":666,"title":"Does both 'bookcorpus' and 'wikipedia' belong to the same datasets which Google used for pretraining 
BERT?","user":{"login":"wahab4114","id":31090427,"node_id":"MDQ6VXNlcjMxMDkwNDI3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31090427?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/wahab4114","html_url":"https:\/\/github.com\/wahab4114","followers_url":"https:\/\/api.github.com\/users\/wahab4114\/followers","following_url":"https:\/\/api.github.com\/users\/wahab4114\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/wahab4114\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/wahab4114\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/wahab4114\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/wahab4114\/orgs","repos_url":"https:\/\/api.github.com\/users\/wahab4114\/repos","events_url":"https:\/\/api.github.com\/users\/wahab4114\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/wahab4114\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-23T19:02:25Z","updated_at":"2020-10-27T15:19:25Z","closed_at":"2020-10-27T15:19:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/666\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/666\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/665","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/665\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/665\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/665\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/665","id":707037738,"node_id":"MDU6SXNzdWU3MDcwMzc3Mzg=","number":665,"title":"runing dataset.map, it raises TypeError: can't pickle Tokenizer 
objects","user":{"login":"xixiaoyao","id":24541791,"node_id":"MDQ6VXNlcjI0NTQxNzkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24541791?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/xixiaoyao","html_url":"https:\/\/github.com\/xixiaoyao","followers_url":"https:\/\/api.github.com\/users\/xixiaoyao\/followers","following_url":"https:\/\/api.github.com\/users\/xixiaoyao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/xixiaoyao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/xixiaoyao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/xixiaoyao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/xixiaoyao\/orgs","repos_url":"https:\/\/api.github.com\/users\/xixiaoyao\/repos","events_url":"https:\/\/api.github.com\/users\/xixiaoyao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/xixiaoyao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-09-23T04:28:14Z","updated_at":"2020-10-08T09:32:16Z","closed_at":"2020-10-08T09:32:16Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I load squad dataset. Then want to process data use following function with `Huggingface Transformers LongformerTokenizer`.\r\n\r\n```\r\ndef convert_to_features(example):\r\n # Tokenize contexts and questions (as pairs of inputs)\r\n input_pairs = [example['question'], example['context']]\r\n encodings = tokenizer.encode_plus(input_pairs, pad_to_max_length=True, max_length=512)\r\n context_encodings = tokenizer.encode_plus(example['context'])\r\n \r\n\r\n # Compute start and end tokens for labels using Transformers's fast tokenizers alignement methodes.\r\n # this will give us the position of answer span in the context text\r\n start_idx, end_idx = get_correct_alignement(example['context'], example['answers'])\r\n start_positions_context = context_encodings.char_to_token(start_idx)\r\n end_positions_context = context_encodings.char_to_token(end_idx-1)\r\n\r\n # here we will compute the start and end position of the answer in the whole example\r\n # as the example is encoded like this question<\/s><\/s> context<\/s>\r\n # and we know the postion of the answer in the context\r\n # we can just find out the index of the sep token and then add that to position + 1 (+1 because there are two sep tokens)\r\n # this will give us the position of the answer span in whole example \r\n sep_idx = encodings['input_ids'].index(tokenizer.sep_token_id)\r\n start_positions = start_positions_context + sep_idx + 1\r\n end_positions = end_positions_context + sep_idx + 1\r\n\r\n if end_positions > 512:\r\n start_positions, end_positions = 0, 0\r\n\r\n encodings.update({'start_positions': start_positions,\r\n 'end_positions': end_positions,\r\n 'attention_mask': encodings['attention_mask']})\r\n return encodings\r\n```\r\n\r\nThen I run `dataset.map(convert_to_features)`, it raise\r\n```\r\nIn [59]: a.map(convert_to_features) \r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n----> 1 a.map(convert_to_features)\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, 
writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint)\r\n 1242 fn_kwargs=fn_kwargs,\r\n 1243 new_fingerprint=new_fingerprint,\r\n-> 1244 update_data=update_data,\r\n 1245 )\r\n 1246 else:\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 151 \"output_all_columns\": self._output_all_columns,\r\n 152 }\r\n--> 153 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 154 if new_format[\"columns\"] is not None:\r\n 155 new_format[\"columns\"] = list(set(new_format[\"columns\"]) & set(out.column_names))\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 156 kwargs_for_fingerprint[\"fingerprint_name\"] = fingerprint_name\r\n 157 kwargs[fingerprint_name] = update_fingerprint(\r\n--> 158 self._fingerprint, transform, kwargs_for_fingerprint\r\n 159 )\r\n 160 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in update_fingerprint(fingerprint, transform, transform_args)\r\n 103 for key in sorted(transform_args):\r\n 104 hasher.update(key)\r\n--> 105 hasher.update(transform_args[key])\r\n 106 return hasher.hexdigest()\r\n 107 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in update(self, value)\r\n 55 def update(self, value):\r\n 56 self.m.update(f\"=={type(value)}==\".encode(\"utf8\"))\r\n---> 57 self.m.update(self.hash(value).encode(\"utf-8\"))\r\n 58 \r\n 59 def hexdigest(self):\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in hash(cls, value)\r\n 51 return cls.dispatch[type(value)](cls, value)\r\n 52 else:\r\n---> 53 return cls.hash_default(value)\r\n 54 \r\n 55 def update(self, value):\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in hash_default(cls, value)\r\n 44 @classmethod\r\n 45 def hash_default(cls, value):\r\n---> 46 return cls.hash_bytes(dumps(value))\r\n 47 \r\n 48 @classmethod\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py in dumps(obj)\r\n 365 file = StringIO()\r\n 366 with _no_cache_fields(obj):\r\n--> 367 dump(obj, file)\r\n 368 return file.getvalue()\r\n 369 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py in dump(obj, file)\r\n 337 def dump(obj, file):\r\n 338 \"\"\"pickle an object to a file\"\"\"\r\n--> 339 Pickler(file, recurse=True).dump(obj)\r\n 340 return\r\n 341 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/dill\/_dill.py in dump(self, obj)\r\n 444 raise PicklingError(msg)\r\n 445 else:\r\n--> 446 StockPickler.dump(self, obj)\r\n 447 stack.clear() # clear record of 'recursion-sensitive' pickled objects\r\n 448 return\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in dump(self, obj)\r\n 435 if self.proto >= 4:\r\n 436 self.framer.start_framing()\r\n--> 437 self.save(obj)\r\n 438 self.write(STOP)\r\n 439 self.framer.end_framing()\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit self\r\n 505 return\r\n 506 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/dill\/_dill.py in save_function(pickler, obj)\r\n 1436 globs, obj.__name__,\r\n 1437 obj.__defaults__, obj.__closure__,\r\n-> 1438 obj.__dict__, fkwdefaults), obj=obj)\r\n 1439 else:\r\n 1440 _super = ('super' in getattr(obj.func_code,'co_names',())) and (_byref is not None) and 
getattr(pickler, '_recurse', False)\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save_reduce(self, func, args, state, listitems, dictitems, obj)\r\n 636 else:\r\n 637 save(func)\r\n--> 638 save(args)\r\n 639 write(REDUCE)\r\n 640 \r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit self\r\n 505 return\r\n 506 \r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save_tuple(self, obj)\r\n 787 write(MARK)\r\n 788 for element in obj:\r\n--> 789 save(element)\r\n 790 \r\n 791 if id(obj) in memo:\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit self\r\n 505 return\r\n 506 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/dill\/_dill.py in save_module_dict(pickler, obj)\r\n 931 # we only care about session the first pass thru\r\n 932 pickler._session = False\r\n--> 933 StockPickler.save_dict(pickler, obj)\r\n 934 log.info(\"# D2\")\r\n 935 return\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save_dict(self, obj)\r\n 857 \r\n 858 self.memoize(obj)\r\n--> 859 self._batch_setitems(obj.items())\r\n 860 \r\n 861 dispatch[dict] = save_dict\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in _batch_setitems(self, items)\r\n 883 for k, v in tmp:\r\n 884 save(k)\r\n--> 885 save(v)\r\n 886 write(SETITEMS)\r\n 887 elif n:\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 547 \r\n 548 # Save the reduce() output and finally memoize the object\r\n--> 549 self.save_reduce(obj=obj, *rv)\r\n 550 \r\n 551 def persistent_id(self, obj):\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save_reduce(self, func, args, state, listitems, dictitems, obj)\r\n 660 \r\n 661 if state is not None:\r\n--> 662 save(state)\r\n 663 write(BUILD)\r\n 664 \r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit self\r\n 505 return\r\n 506 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/dill\/_dill.py in save_module_dict(pickler, obj)\r\n 931 # we only care about session the first pass thru\r\n 932 pickler._session = False\r\n--> 933 StockPickler.save_dict(pickler, obj)\r\n 934 log.info(\"# D2\")\r\n 935 return\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save_dict(self, obj)\r\n 857 \r\n 858 self.memoize(obj)\r\n--> 859 self._batch_setitems(obj.items())\r\n 860 \r\n 861 dispatch[dict] = save_dict\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in _batch_setitems(self, items)\r\n 883 for k, v in tmp:\r\n 884 save(k)\r\n--> 885 save(v)\r\n 886 write(SETITEMS)\r\n 887 elif n:\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 547 \r\n 548 # Save the reduce() output and finally memoize the object\r\n--> 549 self.save_reduce(obj=obj, *rv)\r\n 550 \r\n 551 def persistent_id(self, obj):\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save_reduce(self, func, args, state, listitems, dictitems, obj)\r\n 660 \r\n 661 if state is not None:\r\n--> 662 save(state)\r\n 663 write(BUILD)\r\n 664 \r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 502 f = self.dispatch.get(t)\r\n 503 if f is not None:\r\n--> 504 f(self, obj) # Call unbound method with explicit 
self\r\n 505 return\r\n 506 \r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/dill\/_dill.py in save_module_dict(pickler, obj)\r\n 931 # we only care about session the first pass thru\r\n 932 pickler._session = False\r\n--> 933 StockPickler.save_dict(pickler, obj)\r\n 934 log.info(\"# D2\")\r\n 935 return\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save_dict(self, obj)\r\n 857 \r\n 858 self.memoize(obj)\r\n--> 859 self._batch_setitems(obj.items())\r\n 860 \r\n 861 dispatch[dict] = save_dict\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in _batch_setitems(self, items)\r\n 883 for k, v in tmp:\r\n 884 save(k)\r\n--> 885 save(v)\r\n 886 write(SETITEMS)\r\n 887 elif n:\r\n\r\n\/opt\/conda\/lib\/python3.7\/pickle.py in save(self, obj, save_persistent_id)\r\n 522 reduce = getattr(obj, \"__reduce_ex__\", None)\r\n 523 if reduce is not None:\r\n--> 524 rv = reduce(self.proto)\r\n 525 else:\r\n 526 reduce = getattr(obj, \"__reduce__\", None)\r\n\r\nTypeError: can't pickle Tokenizer objects\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/665\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/665\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/664","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/664\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/664\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/664\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/664","id":707017791,"node_id":"MDU6SXNzdWU3MDcwMTc3OTE=","number":664,"title":"load_dataset from local squad.py, raise error: TypeError: 'NoneType' object is not callable ","user":{"login":"xixiaoyao","id":24541791,"node_id":"MDQ6VXNlcjI0NTQxNzkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24541791?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/xixiaoyao","html_url":"https:\/\/github.com\/xixiaoyao","followers_url":"https:\/\/api.github.com\/users\/xixiaoyao\/followers","following_url":"https:\/\/api.github.com\/users\/xixiaoyao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/xixiaoyao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/xixiaoyao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/xixiaoyao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/xixiaoyao\/orgs","repos_url":"https:\/\/api.github.com\/users\/xixiaoyao\/repos","events_url":"https:\/\/api.github.com\/users\/xixiaoyao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/xixiaoyao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-09-23T03:53:36Z","updated_at":"2020-10-20T09:06:13Z","closed_at":"2020-10-20T09:06:13Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"\r\nversion: 1.0.2\r\n\r\n```\r\ntrain_dataset = datasets.load_dataset('squad') \r\n```\r\n\r\nThe above code can works. However, when I download the squad.py from your server, and saved as `my_squad.py` to local. 
I run followings raise errors.\r\n```\r\ntrain_dataset = datasets.load_dataset('.\/my_squad.py') \r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n----> 1 train_dataset = nlp.load_dataset('.\/my_squad.py')\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, save_infos, script_version, **config_kwargs)\r\n 602 hash=hash,\r\n 603 features=features,\r\n--> 604 **config_kwargs,\r\n 605 )\r\n 606 \r\n\r\nTypeError: 'NoneType' object is not callable\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/664\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/664\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/663","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/663\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/663\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/663\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/663","id":706732636,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkxMjI3NzUz","number":663,"title":"Created dataset card snli.md","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[{"id":2067401494,"node_id":"MDU6TGFiZWwyMDY3NDAxNDk0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Dataset%20discussion","name":"Dataset discussion","color":"72f99f","default":false,"description":"Discussions on the 
datasets"}],"state":"closed","locked":false,"assignee":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"assignees":[{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":11,"created_at":"2020-09-22T22:29:37Z","updated_at":"2020-10-13T17:05:20Z","closed_at":"2020-10-12T20:26:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/663","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/663","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/663.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/663.patch","merged_at":"2020-10-12T20:26:52Z"},"body":"First draft of a dataset card using the SNLI corpus as an example.\r\n\r\nThis is mostly based on the [Google Doc draft](https:\/\/docs.google.com\/document\/d\/1dKPGP-dA2W0QoTRGfqQ5eBp0CeSsTy7g2yM8RseHtos\/edit), but I added a few sections and moved some things around. \r\n\r\n- I moved **Who Was Involved** to follow **Language**, both because I thought the authors should be presented more towards the front and because I think it makes sense to present the speakers close to the language so it doesn't have to be repeated.\r\n\r\n- I created a section I called **Data Characteristics** by pulling some things out of the other sections. I was thinking that this would be more about the language use in context of the specific task construction. That name isn't very descriptive though and could probably be improved.\r\n-- Domain and language type out of **Language**. 
I particularly wanted to keep the Language section as simple and as abstracted from the task as possible.\r\n-- 'How was the data collected' out of **Who Was Involved** \r\n-- Normalization out of **Features\/Dataset Structure** \r\n-- I also added an annotation process section.\r\n\r\n- I kept the **Features** section mostly the same as the Google Doc, but I renamed it **Dataset Structure** to more clearly separate it from the language use, and added some links to the documentation pages. \r\n\r\n- I also kept **Tasks Supported**, **Known Limitations**, and **Licensing Information** mostly the same. Looking at it again though, maybe **Tasks Supported** should come before **Data Characteristics**?\r\n\r\nThe trickiest part about writing a dataset card for the SNLI corpus specifically is that it's built on datasets which are themselves built on datasets so I had to dig in a lot of places to find information. I think this will be easier with other datasets and once there is more uptake of dataset cards so they can just link to each other. (Maybe that needs to be an added section?)\r\n\r\nI also made an effort not to repeat information across the sections or to refer to a previous section if the information was relevant in a later one. Is there too much repetition still?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/663\/reactions","total_count":3,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/663\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/662","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/662\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/662\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/662\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/662","id":706689866,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkxMTkyNTM3","number":662,"title":"Created dataset card snli.md","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[{"id":2067401494,"node_id":"MDU6TGFiZWwyMDY3NDAxNDk0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Dataset%20discussion","name":"Dataset discussion","color":"72f99f","default":false,"description":"Discussions on the 
datasets"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-22T21:00:17Z","updated_at":"2020-09-22T21:26:21Z","closed_at":"2020-09-22T21:26:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/662","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/662","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/662.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/662.patch","merged_at":null},"body":"First draft of a dataset card using the SNLI corpus as an example","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/662\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/662\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/661","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/661\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/661\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/661\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/661","id":706465936,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkxMDA3NjEw","number":661,"title":"Replace pa.OSFile by open","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-22T15:05:59Z","updated_at":"2021-05-05T18:24:36Z","closed_at":"2020-09-22T15:15:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/661","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/661","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/661.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/661.patch","merged_at":null},"body":"It should fix #643 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/661\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/661\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/660","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/660\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/660\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/660\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/660","id":706324032,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkwODkyMjQ0","number":660,"title":"add openwebtext","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-09-22T12:05:22Z","updated_at":"2020-10-06T09:20:10Z","closed_at":"2020-09-28T09:07:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/660","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/660","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/660.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/660.patch","merged_at":"2020-09-28T09:07:26Z"},"body":"This adds [The OpenWebText Corpus](https:\/\/skylion007.github.io\/OpenWebTextCorpus\/), which is a clean and large text corpus for nlp pretraining. It is an open source effort to reproduce OpenAI\u2019s WebText dataset used by GPT-2, and it is also needed to reproduce ELECTRA.\r\n\r\nIt solves #132 .\r\n\r\n### Besides dataset building script, I made some changes to the library.\r\n\r\n1. Extract large amount of compressed files with multi processing\r\nI add a `num_proc` argument to `DownloadManager.extract` and pass this `num_proc` to `map_nested`. So I can decompress 20 thousands compressed files faster. `num_proc` I add is default to `None`, so it shouldn't break any other thing.\r\n\r\n2. In `cached_path`, I change the order to deal with different kind of compressed files (zip, tar, gzip)\r\nBecause there is no way to 100% detect a file is a zip file (see [this](https:\/\/stackoverflow.com\/questions\/18194688\/how-can-i-determine-if-a-file-is-a-zip-file)), I found it wrongly detect `'.\/datasets\/downloads\/extracted\/58764bd6898fa339b25d92e7fbbc3d8dbf64fb504edff1a30a1d7d99d1561027\/openwebtext\/urlsf_subset13-630_data.xz'` as a zip and try decompress it with zip, sure it will get error. 
So I made it detect wheter the file is tar or gzip first and detect zip in the last.\r\n\r\n3. `MockDownloadManager.extract`\r\nCuz I pass `num_proc` to `DownloadManager.extract`, I also have to make `MockDownloadManager.extract` to accept extra keywork arguments. So I make it `extract(path, *args, **kwargs)`, but just return the path as original implementation.\r\n\r\n**Note**: If there is better way for points mentioned above, thought I would like to help, unless we can solve point4 (make dataset building fast), I may not be able to afford rebuild the dataset again because of change of the dataset script (Building the dataset cost me 4 days). \r\n\r\n### There is something I think we can improve\r\n\r\n4. Long time to decompress compressed files\r\nEven I decompress those 20 thousands compressed files with 12 process on my 16 core 3.x Ghz server. It still took about 3 ~ 4days to complete dataset building. Most of time spent on decompress those files.\r\n\r\n### Info about the source data\r\nThe source data is an tar.xz file with following structure, files\/directory beyond compressed file is what can we get after decompress it.\r\n```\r\nopenwebtext.tar.xz\r\n |__ openwebtext\r\n |__subset000.xz\r\n | |__ ....txt\r\n | |__ ....txt\r\n | ...\r\n |__ subset001.xz\r\n |\r\n ....\r\n```\r\nAnd this the structure of dummy data, same as the original one.\r\n```\r\ndummy_data.zip\r\n |__ dummy_data\r\n |__ openwebtext\r\n |__fake_subset-1_data-dirxz # actually it is a directory\r\n | |__ ....txt\r\n | |__ ....txt\r\n |__ fake_subset-2_data-dirxz\r\n |__ ....txt\r\n |__ ....txt\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/660\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/660\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/659","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/659\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/659\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/659\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/659","id":706231506,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkwODE4NTY1","number":659,"title":"Keep new columns in transmit 
format","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-22T09:47:23Z","updated_at":"2020-09-22T10:07:22Z","closed_at":"2020-09-22T10:07:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/659","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/659","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/659.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/659.patch","merged_at":"2020-09-22T10:07:20Z"},"body":"When a dataset is formatted with a list of columns that `__getitem__` should return, then calling `map` to add new columns doesn't add the new columns to this list. \r\n\r\nIt caused `KeyError` issues in #620 \r\n\r\nI changed the logic to add those new columns to the list that `__getitem__` should return.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/659\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/659\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/658","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/658\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/658\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/658\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/658","id":706206247,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkwNzk4MDc0","number":658,"title":"Fix squad metric's 
Features","user":{"login":"tshrjn","id":8372098,"node_id":"MDQ6VXNlcjgzNzIwOTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8372098?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tshrjn","html_url":"https:\/\/github.com\/tshrjn","followers_url":"https:\/\/api.github.com\/users\/tshrjn\/followers","following_url":"https:\/\/api.github.com\/users\/tshrjn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tshrjn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tshrjn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tshrjn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tshrjn\/orgs","repos_url":"https:\/\/api.github.com\/users\/tshrjn\/repos","events_url":"https:\/\/api.github.com\/users\/tshrjn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tshrjn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-22T09:09:52Z","updated_at":"2020-09-29T15:58:30Z","closed_at":"2020-09-29T15:58:30Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/658","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/658","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/658.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/658.patch","merged_at":null},"body":"Resolves issue [657](https:\/\/github.com\/huggingface\/datasets\/issues\/657).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/658\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/658\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/657","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/657\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/657\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/657\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/657","id":706204383,"node_id":"MDU6SXNzdWU3MDYyMDQzODM=","number":657,"title":"Squad Metric Description & Feature 
Mismatch","user":{"login":"tshrjn","id":8372098,"node_id":"MDQ6VXNlcjgzNzIwOTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8372098?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tshrjn","html_url":"https:\/\/github.com\/tshrjn","followers_url":"https:\/\/api.github.com\/users\/tshrjn\/followers","following_url":"https:\/\/api.github.com\/users\/tshrjn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tshrjn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tshrjn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tshrjn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tshrjn\/orgs","repos_url":"https:\/\/api.github.com\/users\/tshrjn\/repos","events_url":"https:\/\/api.github.com\/users\/tshrjn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tshrjn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-22T09:07:00Z","updated_at":"2020-10-13T02:16:56Z","closed_at":"2020-09-29T15:57:38Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The [description](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/metrics\/squad\/squad.py#L39) doesn't mention `answer_start` in squad. However the `datasets.features` require [it](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/metrics\/squad\/squad.py#L68). It's also not used in the evaluation.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/657\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/657\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/656","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/656\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/656\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/656\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/656","id":705736319,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkwNDEwODAz","number":656,"title":"Use multiprocess from pathos for 
multiprocessing","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-09-21T16:12:19Z","updated_at":"2020-09-28T14:45:40Z","closed_at":"2020-09-28T14:45:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/656","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/656","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/656.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/656.patch","merged_at":"2020-09-28T14:45:39Z"},"body":"[Multiprocess](https:\/\/github.com\/uqfoundation\/multiprocess) (from the [pathos](https:\/\/github.com\/uqfoundation\/pathos) project) allows to use lambda functions in multiprocessed map.\r\nIt was suggested to use it by @kandorm.\r\n\r\nWe're already using dill which is its only dependency.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/656\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/656\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/655","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/655\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/655\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/655\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/655","id":705672208,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkwMzU4OTQ3","number":655,"title":"added Winogrande debiased 
subset","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-21T14:51:08Z","updated_at":"2020-09-21T16:20:40Z","closed_at":"2020-09-21T16:16:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/655","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/655","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/655.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/655.patch","merged_at":"2020-09-21T16:16:04Z"},"body":"The [Winogrande](https:\/\/arxiv.org\/abs\/1907.10641) paper mentions a `debiased` subset that wasn't in the first release; this PR adds it.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/655\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/655\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/654","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/654\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/654\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/654\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/654","id":705511058,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkwMjI1Nzk3","number":654,"title":"Allow empty inputs in 
metrics","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-21T11:26:36Z","updated_at":"2020-10-06T03:51:48Z","closed_at":"2020-09-21T16:13:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/654","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/654","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/654.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/654.patch","merged_at":"2020-09-21T16:13:38Z"},"body":"There was an arrow error when trying to compute a metric with empty inputs. The error was occurring when reading the arrow file, before calling metric._compute.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/654\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/654\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/653","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/653\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/653\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/653\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/653","id":705482391,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkwMjAxOTg4","number":653,"title":"handle data alteration when trying 
type","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-21T10:41:49Z","updated_at":"2020-09-21T16:13:06Z","closed_at":"2020-09-21T16:13:05Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/653","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/653","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/653.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/653.patch","merged_at":"2020-09-21T16:13:05Z"},"body":"Fix #649 \r\n\r\nThe bug came from the type inference that didn't handle a weird case in Pyarrow.\r\nIndeed this code runs without error but alters the data in arrow:\r\n```python\r\nimport pyarrow as pa\r\n\r\ntype = pa.struct({\"a\": pa.struct({\"b\": pa.string()})})\r\narray_with_altered_data = pa.array([{\"a\": {\"b\": \"foo\", \"c\": \"bar\"}}] * 10, type=type)\r\nprint(array_with_altered_data[0].as_py())\r\n# {'a': {'b': 'foo'}} -> the sub-field \"c\" is missing\r\n```\r\n(I don't know if this is intended in pyarrow tbh)\r\n\r\nWe didn't take this case into account during type inference. 
By default it was keeping old features and maybe alter data.\r\nTo fix that I added a line that checks that the first element of the array is not altered.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/653\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/653\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/652","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/652\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/652\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/652\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/652","id":705390850,"node_id":"MDExOlB1bGxSZXF1ZXN0NDkwMTI3MjIx","number":652,"title":"handle connection error in download_prepared_from_hf_gcs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-21T08:21:11Z","updated_at":"2020-09-21T08:28:43Z","closed_at":"2020-09-21T08:28:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/652","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/652","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/652.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/652.patch","merged_at":"2020-09-21T08:28:42Z"},"body":"Fix #647 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/652\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/652\/timeline","performed_via_github_app":null} 
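Referring back to #653 above, a minimal sketch of the kind of first-element round-trip check described there could look as follows (hypothetical helper name, not the actual patch):

```python
import pyarrow as pa


def encodes_without_loss(example, target_type) -> bool:
    # pyarrow silently drops struct sub-fields that are missing from the target
    # type, so compare the first element before and after encoding it.
    encoded = pa.array([example], type=target_type)
    return encoded[0].as_py() == example


inferred_type = pa.struct({"a": pa.struct({"b": pa.string()})})
example = {"a": {"b": "foo", "c": "bar"}}
print(encodes_without_loss(example, inferred_type))  # False -> re-infer the features instead of reusing the old ones
```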
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/651","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/651\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/651\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/651\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/651","id":705212034,"node_id":"MDU6SXNzdWU3MDUyMTIwMzQ=","number":651,"title":"Problem with JSON dataset format","user":{"login":"vikigenius","id":12724810,"node_id":"MDQ6VXNlcjEyNzI0ODEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12724810?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vikigenius","html_url":"https:\/\/github.com\/vikigenius","followers_url":"https:\/\/api.github.com\/users\/vikigenius\/followers","following_url":"https:\/\/api.github.com\/users\/vikigenius\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vikigenius\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vikigenius\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vikigenius\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vikigenius\/orgs","repos_url":"https:\/\/api.github.com\/users\/vikigenius\/repos","events_url":"https:\/\/api.github.com\/users\/vikigenius\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vikigenius\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-20T23:57:14Z","updated_at":"2020-09-21T12:14:24Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I have a local json dataset with the following form.\r\n\r\n{\r\n 'id01234': {'key1': value1, 'key2': value2, 'key3': value3},\r\n 'id01235': {'key1': value1, 'key2': value2, 'key3': value3},\r\n .\r\n .\r\n .\r\n 'id09999': {'key1': value1, 'key2': value2, 'key3': value3}\r\n}\r\nNote that instead of a list of records it's basically a dictionary of key value pairs with the keys being the record_ids and the values being the corresponding record.\r\n\r\nReading this with json:\r\n\r\n```\r\ndata = datasets.load('json', data_files='path_to_local.json')\r\n```\r\nThrows an error and asks me to chose a field. 
What's the right way to handle this?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/651\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/651\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/650","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/650\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/650\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/650\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/650","id":704861844,"node_id":"MDU6SXNzdWU3MDQ4NjE4NDQ=","number":650,"title":"dummy data testing can't test datasets using `dl_manager.extract` in `_split_generators`","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-09-19T11:07:03Z","updated_at":"2020-09-22T11:54:10Z","closed_at":"2020-09-22T11:54:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I recently want to add a dataset whose source data is like this\r\n```\r\nopenwebtext.tar.xz\r\n |__ openwebtext\r\n |__subset000.xz\r\n | |__ ....txt\r\n | |__ ....txt\r\n | ...\r\n |__ subset001.xz\r\n |\r\n ....\r\n```\r\nSo I wrote `openwebtext.py` like this\r\n```\r\n def _split_generators(self, dl_manager):\r\n dl_dir = dl_manager.download_and_extract(_URL)\r\n owt_dir = os.path.join(dl_dir, 'openwebtext')\r\n subset_xzs = [\r\n os.path.join(owt_dir, file_name) for file_name in os.listdir(owt_dir) if file_name.endswith('xz') # filter out ...xz.lock\r\n ]\r\n ex_dirs = dl_manager.extract(subset_xzs, num_proc=round(os.cpu_count()*0.75))\r\n nested_txt_files = [ \r\n [ \r\n os.path.join(ex_dir,txt_file_name) for txt_file_name in os.listdir(ex_dir) if txt_file_name.endswith('txt')\r\n ] for ex_dir in ex_dirs\r\n ]\r\n txt_files = chain(*nested_txt_files)\r\n return [\r\n datasets.SplitGenerator(\r\n name=datasets.Split.TRAIN, gen_kwargs={\"txt_files\": txt_files}\r\n ),\r\n ]\r\n```\r\nAll went good, I can load and use real openwebtext, except when I try to test with dummy data. 
The problem is `MockDownloadManager.extract` do nothing, so `ex_dirs = dl_manager.extract(subset_xzs)` won't decompress `subset_xxx.xz`s for me.\r\n\r\nHow should I do ? Or you can modify `MockDownloadManager` to make it like a real `DownloadManager` ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/650\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/650\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/649","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/649\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/649\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/649\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/649","id":704838415,"node_id":"MDU6SXNzdWU3MDQ4Mzg0MTU=","number":649,"title":"Inconsistent behavior in map","user":{"login":"krandiash","id":10166085,"node_id":"MDQ6VXNlcjEwMTY2MDg1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10166085?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/krandiash","html_url":"https:\/\/github.com\/krandiash","followers_url":"https:\/\/api.github.com\/users\/krandiash\/followers","following_url":"https:\/\/api.github.com\/users\/krandiash\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/krandiash\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/krandiash\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/krandiash\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/krandiash\/orgs","repos_url":"https:\/\/api.github.com\/users\/krandiash\/repos","events_url":"https:\/\/api.github.com\/users\/krandiash\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/krandiash\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2020-09-19T08:41:12Z","updated_at":"2020-09-21T16:13:05Z","closed_at":"2020-09-21T16:13:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm observing inconsistent behavior when applying .map(). This happens specifically when I'm incrementally adding onto a feature that is a nested dictionary. 
Here's a simple example that reproduces the problem.\r\n\r\n```python\r\nimport datasets\r\n\r\n# Dataset with a single feature called 'field' consisting of two examples\r\ndataset = datasets.Dataset.from_dict({'field': ['a', 'b']})\r\nprint(dataset[0])\r\n# outputs\r\n{'field': 'a'}\r\n\r\n# Map this dataset to create another feature called 'otherfield', which is a dictionary containing a key called 'capital'\r\ndataset = dataset.map(lambda example: {'otherfield': {'capital': example['field'].capitalize()}})\r\nprint(dataset[0])\r\n# output is okay\r\n{'field': 'a', 'otherfield': {'capital': 'A'}}\r\n\r\n# Now I want to map again to modify 'otherfield', by adding another key called 'append_x' to the dictionary under 'otherfield'\r\nprint(dataset.map(lambda example: {'otherfield': {'append_x': example['field'] + 'x'}})[0])\r\n# printing out the first example after applying the map shows that the new key 'append_x' doesn't get added\r\n# it also messes up the value stored at 'capital'\r\n{'field': 'a', 'otherfield': {'capital': None}}\r\n\r\n# Instead, I try to do the same thing by using a different mapped fn\r\nprint(dataset.map(lambda example: {'otherfield': {'append_x': example['field'] + 'x', 'capital': example['otherfield']['capital']}})[0])\r\n# this preserves the value under capital, but still no 'append_x'\r\n{'field': 'a', 'otherfield': {'capital': 'A'}}\r\n\r\n# Instead, I try to pass 'otherfield' to remove_columns\r\nprint(dataset.map(lambda example: {'otherfield': {'append_x': example['field'] + 'x', 'capital': example['otherfield']['capital']}}, remove_columns=['otherfield'])[0])\r\n# this still doesn't fix the problem\r\n{'field': 'a', 'otherfield': {'capital': 'A'}}\r\n\r\n# Alternately, here's what happens if I just directly map both 'capital' and 'append_x' on a fresh dataset.\r\n\r\n# Recreate the dataset\r\ndataset = datasets.Dataset.from_dict({'field': ['a', 'b']})\r\n# Now map the entire 'otherfield' dict directly, instead of incrementally as before\r\nprint(dataset.map(lambda example: {'otherfield': {'append_x': example['field'] + 'x', 'capital': example['field'].capitalize()}})[0])\r\n# This looks good!\r\n{'field': 'a', 'otherfield': {'append_x': 'ax', 'capital': 'A'}}\r\n```\r\n\r\nThis might be a new issue, because I didn't see this behavior in the `nlp` library. 
\r\n\r\nAny help is appreciated!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/649\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/649\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/648","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/648\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/648\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/648\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/648","id":704753123,"node_id":"MDU6SXNzdWU3MDQ3NTMxMjM=","number":648,"title":"offset overflow when multiprocessing batched map on large datasets.","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-19T02:15:11Z","updated_at":"2020-09-19T16:47:07Z","closed_at":"2020-09-19T16:46:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It only happened when \"multiprocessing\" + \"batched\" + \"large dataset\" at the same time.\r\n\r\n```\r\ndef bprocess(examples):\r\n examples['len'] = []\r\n for text in examples['text']:\r\n examples['len'].append(len(text))\r\n return examples\r\nwiki.map(brpocess, batched=True, num_proc=8)\r\n```\r\n```\r\n---------------------------------------------------------------------------\r\nRemoteTraceback Traceback (most recent call last)\r\nRemoteTraceback: \r\n\"\"\"\r\nTraceback (most recent call last):\r\n File \"\/home\/yisiang\/miniconda3\/envs\/ml\/lib\/python3.7\/multiprocessing\/pool.py\", line 121, in worker\r\n result = (True, func(*args, **kwds))\r\n File \"\/home\/yisiang\/datasets\/src\/datasets\/arrow_dataset.py\", line 153, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/home\/yisiang\/datasets\/src\/datasets\/fingerprint.py\", line 163, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File 
\"\/home\/yisiang\/datasets\/src\/datasets\/arrow_dataset.py\", line 1486, in _map_single\r\n batch = self[i : i + batch_size]\r\n File \"\/home\/yisiang\/datasets\/src\/datasets\/arrow_dataset.py\", line 1071, in __getitem__\r\n format_kwargs=self._format_kwargs,\r\n File \"\/home\/yisiang\/datasets\/src\/datasets\/arrow_dataset.py\", line 972, in _getitem\r\n data_subset = self._data.take(indices_array)\r\n File \"pyarrow\/table.pxi\", line 1145, in pyarrow.lib.Table.take\r\n File \"\/home\/yisiang\/miniconda3\/envs\/ml\/lib\/python3.7\/site-packages\/pyarrow\/compute.py\", line 268, in take\r\n return call_function('take', [data, indices], options)\r\n File \"pyarrow\/_compute.pyx\", line 298, in pyarrow._compute.call_function\r\n File \"pyarrow\/_compute.pyx\", line 192, in pyarrow._compute.Function.call\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: offset overflow while concatenating arrays\r\n\"\"\"\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nArrowInvalid Traceback (most recent call last)\r\n in \r\n 30 owt = datasets.load_dataset('\/home\/yisiang\/datasets\/datasets\/openwebtext\/openwebtext.py', cache_dir='.\/datasets')['train']\r\n 31 print('load\/create data from OpenWebText Corpus for ELECTRA')\r\n---> 32 e_owt = ELECTRAProcessor(owt, apply_cleaning=False).map(cache_file_name=f\"electra_owt_{c.max_length}.arrow\")\r\n 33 dsets.append(e_owt)\r\n 34 \r\n\r\n~\/Reexamine_Attention\/electra_pytorch\/_utils\/utils.py in map(self, **kwargs)\r\n 126 writer_batch_size=10**4,\r\n 127 num_proc=num_proc,\r\n--> 128 **kwargs\r\n 129 )\r\n 130 \r\n\r\n~\/hugdatafast\/hugdatafast\/transform.py in my_map(self, *args, **kwargs)\r\n 21 if not cache_file_name.endswith('.arrow'): cache_file_name += '.arrow'\r\n 22 if '\/' not in cache_file_name: cache_file_name = os.path.join(self.cache_directory(), cache_file_name)\r\n---> 23 return self.map(*args, cache_file_name=cache_file_name, **kwargs)\r\n 24 \r\n 25 @patch\r\n\r\n~\/datasets\/src\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint)\r\n 1285 logger.info(\"Spawning {} processes\".format(num_proc))\r\n 1286 results = [pool.apply_async(self.__class__._map_single, kwds=kwds) for kwds in kwds_per_shard]\r\n-> 1287 transformed_shards = [r.get() for r in results]\r\n 1288 logger.info(\"Concatenating {} shards from multiprocessing\".format(num_proc))\r\n 1289 result = concatenate_datasets(transformed_shards)\r\n\r\n~\/datasets\/src\/datasets\/arrow_dataset.py in (.0)\r\n 1285 logger.info(\"Spawning {} processes\".format(num_proc))\r\n 1286 results = [pool.apply_async(self.__class__._map_single, kwds=kwds) for kwds in kwds_per_shard]\r\n-> 1287 transformed_shards = [r.get() for r in results]\r\n 1288 logger.info(\"Concatenating {} shards from multiprocessing\".format(num_proc))\r\n 1289 result = concatenate_datasets(transformed_shards)\r\n\r\n~\/miniconda3\/envs\/ml\/lib\/python3.7\/multiprocessing\/pool.py in get(self, timeout)\r\n 655 return self._value\r\n 656 else:\r\n--> 657 raise self._value\r\n 658 \r\n 659 def _set(self, i, obj):\r\n\r\nArrowInvalid: offset overflow while concatenating 
arrays\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/648\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/648\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/647","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/647\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/647\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/647\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/647","id":704734764,"node_id":"MDU6SXNzdWU3MDQ3MzQ3NjQ=","number":647,"title":"Cannot download dataset_info.json","user":{"login":"chiyuzhang94","id":33407613,"node_id":"MDQ6VXNlcjMzNDA3NjEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33407613?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chiyuzhang94","html_url":"https:\/\/github.com\/chiyuzhang94","followers_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/followers","following_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/orgs","repos_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/repos","events_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-09-19T01:35:15Z","updated_at":"2020-09-21T08:28:42Z","closed_at":"2020-09-21T08:28:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am running my job on a cloud server where does not provide for connections from the standard compute nodes to outside resources. Hence, when I use `dataset.load_dataset()` to load data, I got an error like this:\r\n\r\n```\r\nConnectionError: Couldn't reach https:\/\/storage.googleapis.com\/huggingface-nlp\/cache\/datasets\/text\/default-53ee3045f07ba8ca\/0.0.0\/dataset_info.json\r\n```\r\n\r\nI tried to open this link manually, but I cannot access this file. 
How can I download this file and pass it through `dataset.load_dataset()` manually?\r\n\r\nVersions:\r\nPython version 3.7.3\r\nPyTorch version 1.6.0\r\nTensorFlow version 2.3.0\r\ndatasets version: 1.0.1 \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/647\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/647\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/646","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/646\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/646\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/646\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/646","id":704607371,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg5NTAyMTM3","number":646,"title":"Fix docs typos","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-18T19:32:27Z","updated_at":"2020-09-21T16:30:54Z","closed_at":"2020-09-21T16:14:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/646","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/646","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/646.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/646.patch","merged_at":"2020-09-21T16:14:12Z"},"body":"This PR fixes few typos in the docs and the error in the code snippet in the set_format section in docs\/source\/torch_tensorflow.rst. `torch.utils.data.Dataloader` expects padded batches so it throws an error due to not being able to stack the unpadded tensors. If we follow the Quick tour from the docs where they add the `truncation=True, padding='max_length'` arguments to the tokenizer before passing data to Dataloader, we can easily fix the issue. 
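As a rough sketch of that fix (illustrative column names; `dataset` and `tokenizer` are assumed to exist already):

```python
from torch.utils.data import DataLoader

# Pad every example to the same length so the default collate_fn can stack the tensors.
encoded = dataset.map(
    lambda examples: tokenizer(examples["text"], truncation=True, padding="max_length"),
    batched=True,
)
encoded.set_format(type="torch", columns=["input_ids", "attention_mask"])
dataloader = DataLoader(encoded, batch_size=32)
```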
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/646\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/646\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/645","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/645\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/645\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/645\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/645","id":704542234,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg5NDQ5MjAx","number":645,"title":"Don't use take on dataset table in pyarrow 1.0.x","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-09-18T17:31:34Z","updated_at":"2020-09-19T16:46:32Z","closed_at":"2020-09-19T16:46:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/645","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/645","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/645.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/645.patch","merged_at":"2020-09-19T16:46:31Z"},"body":"Fix #615 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/645\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/645\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/644","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/644\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/644\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/644\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/644","id":704534501,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg5NDQzMTk1","number":644,"title":"Better windows 
support","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-18T17:17:36Z","updated_at":"2020-09-25T14:02:30Z","closed_at":"2020-09-25T14:02:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/644","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/644","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/644.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/644.patch","merged_at":"2020-09-25T14:02:28Z"},"body":"There are a few differences in the behavior of python and pyarrow on windows.\r\n\r\nFor example there are restrictions when accessing\/deleting files that are open\r\n\r\nFix #590 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/644\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/644\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/643","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/643\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/643\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/643\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/643","id":704477164,"node_id":"MDU6SXNzdWU3MDQ0NzcxNjQ=","number":643,"title":"Caching processed dataset at wrong 
folder","user":{"login":"mrm8488","id":3653789,"node_id":"MDQ6VXNlcjM2NTM3ODk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3653789?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mrm8488","html_url":"https:\/\/github.com\/mrm8488","followers_url":"https:\/\/api.github.com\/users\/mrm8488\/followers","following_url":"https:\/\/api.github.com\/users\/mrm8488\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mrm8488\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mrm8488\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mrm8488\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mrm8488\/orgs","repos_url":"https:\/\/api.github.com\/users\/mrm8488\/repos","events_url":"https:\/\/api.github.com\/users\/mrm8488\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mrm8488\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2020-09-18T15:41:26Z","updated_at":"2020-09-28T16:14:40Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi guys, I run this on my Colab (PRO):\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('text', data_files='\/content\/corpus.txt', cache_dir='\/content\/drive\/My Drive', split='train')\r\n\r\ndef encode(examples):\r\n return tokenizer(examples['text'], truncation=True, padding='max_length')\r\n\r\ndataset = dataset.map(encode, batched=True)\r\n```\r\nThe file is about 4 GB, so I cannot process it on the Colab HD because there is no enough space. 
So I decided to mount my Google Drive fs and do it on it.\r\nThe dataset is cached in the right place but by processing it (applying `encode` function) seems to use a different folder because Colab HD starts to grow and it crashes when it should be done in the Drive fs.\r\n\r\nWhat gets me crazy, it prints it is processing\/encoding the dataset in the right folder:\r\n```\r\nTesting the mapped function outputs\r\nTesting finished, running the mapping function on the dataset\r\nCaching processed dataset at \/content\/drive\/My Drive\/text\/default-ad3e69d6242ee916\/0.0.0\/7e13bc0fa76783d4ef197f079dc8acfe54c3efda980f2c9adfab046ede2f0ff7\/cache-b16341780a59747d.arrow\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/643\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/643\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/642","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/642\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/642\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/642\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/642","id":704397499,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg5MzMwMDAx","number":642,"title":"Rename wnut fields","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-18T13:51:31Z","updated_at":"2020-09-18T17:18:31Z","closed_at":"2020-09-18T17:18:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/642","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/642","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/642.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/642.patch","merged_at":"2020-09-18T17:18:30Z"},"body":"As mentioned in #641 it would be cool to have it follow the naming of the other NER 
datasets","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/642\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/642\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/641","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/641\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/641\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/641\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/641","id":704373940,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg5MzExOTU3","number":641,"title":"Add Polyglot-NER Dataset","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-09-18T13:21:44Z","updated_at":"2020-09-20T03:04:43Z","closed_at":"2020-09-20T03:04:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/641","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/641","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/641.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/641.patch","merged_at":"2020-09-20T03:04:43Z"},"body":"Adds the [Polyglot-NER dataset](https:\/\/sites.google.com\/site\/rmyeid\/projects\/polylgot-ner) with named entity tags for 40 languages. 
I include separate configs for each language as well as a `combined` config which lumps them all together.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/641\/reactions","total_count":6,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":2,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/641\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/640","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/640\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/640\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/640\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/640","id":704311758,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg5MjYwNTc1","number":640,"title":"Make shuffle compatible with temp_seed","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-18T11:38:58Z","updated_at":"2020-09-18T11:47:51Z","closed_at":"2020-09-18T11:47:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/640","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/640","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/640.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/640.patch","merged_at":"2020-09-18T11:47:50Z"},"body":"This code used to return different dataset at each run\r\n```python\r\nimport dataset as ds\r\n\r\ndataset = ...\r\n\r\nwith ds.temp_seed(42):\r\n shuffled = dataset.shuffle()\r\n```\r\n\r\nNow it returns the same one since the seed is set","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/640\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/640\/timeline","performed_via_github_app":null} 
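Assuming the dataset lands on the Hub under the id `polyglot_ner` with the per-language configs plus the `combined` config that the #641 description mentions, loading it would look roughly like this (the language code `en` is an assumed example):

```python
from datasets import load_dataset

# "combined" lumps all 40 languages together, per the PR description;
# "en" stands in for any single-language config.
english = load_dataset("polyglot_ner", "en", split="train")
combined = load_dataset("polyglot_ner", "combined", split="train")

print(english[0])  # inspect one example to see the word and NER-tag columns
```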
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/639","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/639\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/639\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/639\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/639","id":704217963,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg5MTgxOTY3","number":639,"title":"Update glue QQP checksum","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-18T09:08:15Z","updated_at":"2020-09-18T11:37:08Z","closed_at":"2020-09-18T11:37:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/639","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/639","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/639.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/639.patch","merged_at":"2020-09-18T11:37:07Z"},"body":"Fix #638 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/639\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/639\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/638","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/638\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/638\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/638\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/638","id":704146956,"node_id":"MDU6SXNzdWU3MDQxNDY5NTY=","number":638,"title":"GLUE\/QQP dataset: 
NonMatchingChecksumError","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-18T07:09:10Z","updated_at":"2020-09-18T11:37:07Z","closed_at":"2020-09-18T11:37:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi @lhoestq , I know you are busy and there are also other important issues. But if this is easy to be fixed, I am shamelessly wondering if you can give me some help , so I can evaluate my models and restart with my developing cycle asap. \ud83d\ude1a\r\n\r\ndatasets version: editable install of master at 9\/17\r\n\r\n`datasets.load_dataset('glue','qqp', cache_dir='.\/datasets')`\r\n\r\n```\r\nDownloading and preparing dataset glue\/qqp (download: 57.73 MiB, generated: 107.02 MiB, post-processed: Unknown size, total: 164.75 MiB) to .\/datasets\/glue\/qqp\/1.0.0\/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4...\r\n---------------------------------------------------------------------------\r\nNonMatchingChecksumError Traceback (most recent call last)\r\n in \r\n----> 1 datasets.load_dataset('glue','qqp', cache_dir='.\/datasets')\r\n\r\n~\/datasets\/src\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, save_infos, script_version, **config_kwargs)\r\n 609 download_config=download_config,\r\n 610 download_mode=download_mode,\r\n--> 611 ignore_verifications=ignore_verifications,\r\n 612 )\r\n 613 \r\n\r\n~\/datasets\/src\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n 467 if not downloaded_from_gcs:\r\n 468 self._download_and_prepare(\r\n--> 469 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 470 )\r\n 471 # Sync info\r\n\r\n~\/datasets\/src\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 527 if verify_infos:\r\n 528 verify_checksums(\r\n--> 529 self.info.download_checksums, dl_manager.get_recorded_sizes_checksums(), \"dataset source files\"\r\n 530 )\r\n 531 \r\n\r\n~\/datasets\/src\/datasets\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 37 if len(bad_urls) > 0:\r\n 38 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 39 raise 
NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 40 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 41 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/dl.fbaipublicfiles.com\/glue\/data\/QQP-clean.zip']\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/638\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/638\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/637","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/637\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/637\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/637\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/637","id":703539909,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg4NjMwNzk4","number":637,"title":"Add MATINF","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-17T12:24:53Z","updated_at":"2020-09-17T13:23:18Z","closed_at":"2020-09-17T13:23:17Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/637","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/637","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/637.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/637.patch","merged_at":"2020-09-17T13:23:17Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/637\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/637\/timeline","performed_via_github_app":null} 
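The `load_dataset` signature visible in the traceback above includes an `ignore_verifications` flag, so a temporary unblock while waiting for the checksum update in #639 could look like the sketch below; it only skips the verification step and does not fix the stale recorded checksum.

```python
from datasets import load_dataset

# Skips the checksum/size comparison that raises NonMatchingChecksumError.
dataset = load_dataset("glue", "qqp", cache_dir="./datasets", ignore_verifications=True)
```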
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/636","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/636\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/636\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/636\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/636","id":702883989,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg4MDg3OTA5","number":636,"title":"Consistent ner features","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-16T15:56:25Z","updated_at":"2020-09-17T09:52:59Z","closed_at":"2020-09-17T09:52:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/636","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/636","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/636.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/636.patch","merged_at":"2020-09-17T09:52:58Z"},"body":"As discussed in #613 , this PR aims at making NER feature names consistent across datasets.\r\n\r\nI changed the feature names of LinCE and XTREME\/PAN-X","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/636\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/636\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/635","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/635\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/635\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/635\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/635","id":702822439,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg4MDM2OTE5","number":635,"title":"Loglevel","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-16T14:37:53Z","updated_at":"2020-09-17T09:52:19Z","closed_at":"2020-09-17T09:52:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/635","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/635","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/635.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/635.patch","merged_at":"2020-09-17T09:52:18Z"},"body":"Continuation of #618 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/635\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/635\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/634","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/634\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/634\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/634\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/634","id":702676041,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg3OTEzOTk4","number":634,"title":"Add ConLL-2000 
dataset","user":{"login":"vblagoje","id":458335,"node_id":"MDQ6VXNlcjQ1ODMzNQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/458335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vblagoje","html_url":"https:\/\/github.com\/vblagoje","followers_url":"https:\/\/api.github.com\/users\/vblagoje\/followers","following_url":"https:\/\/api.github.com\/users\/vblagoje\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vblagoje\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vblagoje\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vblagoje\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vblagoje\/orgs","repos_url":"https:\/\/api.github.com\/users\/vblagoje\/repos","events_url":"https:\/\/api.github.com\/users\/vblagoje\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vblagoje\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-16T11:14:11Z","updated_at":"2020-09-17T10:38:10Z","closed_at":"2020-09-17T10:38:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/634","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/634","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/634.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/634.patch","merged_at":"2020-09-17T10:38:10Z"},"body":"Adds ConLL-2000 dataset used for text chunking. See https:\/\/www.clips.uantwerpen.be\/conll2000\/chunking\/ for details and [motivation](https:\/\/github.com\/huggingface\/transformers\/pull\/7041#issuecomment-692710948) behind this PR","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/634\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/634\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/633","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/633\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/633\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/633\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/633","id":702440484,"node_id":"MDU6SXNzdWU3MDI0NDA0ODQ=","number":633,"title":"Load large text file for LM pre-training resulting in 
OOM","user":{"login":"leethu2012","id":29704017,"node_id":"MDQ6VXNlcjI5NzA0MDE3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29704017?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/leethu2012","html_url":"https:\/\/github.com\/leethu2012","followers_url":"https:\/\/api.github.com\/users\/leethu2012\/followers","following_url":"https:\/\/api.github.com\/users\/leethu2012\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/leethu2012\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/leethu2012\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/leethu2012\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/leethu2012\/orgs","repos_url":"https:\/\/api.github.com\/users\/leethu2012\/repos","events_url":"https:\/\/api.github.com\/users\/leethu2012\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/leethu2012\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":27,"created_at":"2020-09-16T04:33:15Z","updated_at":"2021-02-16T12:02:01Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I tried to pretrain Longformer using transformers and datasets. But I got OOM issues with loading a large text file. My script is almost like this:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n@dataclass\r\nclass DataCollatorForDatasetsLanguageModeling(DataCollatorForLanguageModeling):\r\n \"\"\"\r\n Data collator used for language modeling based on DataCollatorForLazyLanguageModeling\r\n - collates batches of tensors, honoring their tokenizer's pad_token\r\n - preprocesses batches for masked language modeling\r\n \"\"\"\r\n\r\n block_size: int = 512\r\n\r\n def __call__(self, examples: List[dict]) -> Dict[str, torch.Tensor]:\r\n examples = [example['text'] for example in examples]\r\n batch, attention_mask = self._tensorize_batch(examples)\r\n if self.mlm:\r\n inputs, labels = self.mask_tokens(batch)\r\n return {\"input_ids\": inputs, \"labels\": labels}\r\n else:\r\n labels = batch.clone().detach()\r\n if self.tokenizer.pad_token_id is not None:\r\n labels[labels == self.tokenizer.pad_token_id] = -100\r\n return {\"input_ids\": batch, \"labels\": labels}\r\n\r\n def _tensorize_batch(self, examples: List[str]) -> Tuple[torch.Tensor, torch.Tensor]:\r\n\r\n if self.tokenizer._pad_token is None:\r\n raise ValueError(\r\n \"You are attempting to pad samples but the tokenizer you are using\"\r\n f\" ({self.tokenizer.__class__.__name__}) does not have one.\"\r\n )\r\n\r\n tensor_examples = self.tokenizer.batch_encode_plus(\r\n [ex for ex in examples if ex],\r\n max_length=self.block_size,\r\n return_tensors=\"pt\",\r\n pad_to_max_length=True,\r\n return_attention_mask=True,\r\n truncation=True,\r\n )\r\n\r\n input_ids, attention_mask = tensor_examples[\"input_ids\"], tensor_examples[\"attention_mask\"]\r\n return input_ids, attention_mask\r\n\r\ndataset = load_dataset('text', data_files='train.txt',cache_dir=\".\/\", , split='train')\r\ndata_collator = DataCollatorForDatasetsLanguageModeling(tokenizer=tokenizer, mlm=True, \r\n mlm_probability=0.15, block_size=tokenizer.max_len)\r\ntrainer = Trainer(model=model, args=args, data_collator=data_collator,\r\n train_dataset=train_dataset, prediction_loss_only=True, )\r\ntrainer.train(model_path=model_path)\r\n```\r\nThis train.txt is about 1.1GB and has 90k lines 
where each line is a sequence of 4k words. \r\nDuring training, the memory usage increased fast as the following graph and resulted in OOM before the finish of training.\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/29704017\/93292112-5576b280-f817-11ea-8da2-b2db9bf35665.png)\r\n\r\nCould you please give me any suggestions on why this happened and how to fix it?\r\nThanks. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/633\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/633\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/632","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/632\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/632\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/632\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/632","id":702358124,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg3NjQ5OTQ2","number":632,"title":"Fix typos in the loading datasets docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-16T00:27:41Z","updated_at":"2020-09-21T16:31:11Z","closed_at":"2020-09-16T06:52:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/632","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/632","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/632.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/632.patch","merged_at":"2020-09-16T06:52:44Z"},"body":"This PR fixes two typos in the loading datasets docs, one of them being a broken link to the `load_dataset` function.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/632\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/632\/timeline","performed_via_github_app":null} 
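One hedged workaround for the OOM report in #633, not the resolution recorded there: tokenize once with `Dataset.map`, which writes its output to the on-disk, memory-mapped Arrow cache instead of building tensors inside the collator for every batch. The tokenizer checkpoint and block length below are assumptions that follow the Longformer setup described above.

```python
from datasets import load_dataset
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("allenai/longformer-base-4096")  # assumed checkpoint

# The raw text file is memory-mapped from the Arrow cache, not read into RAM.
dataset = load_dataset("text", data_files="train.txt", cache_dir="./", split="train")

def tokenize(batch):
    return tokenizer(batch["text"], truncation=True, padding="max_length", max_length=4096)

# map() processes the file in batches and writes the tokenized result back to
# disk, so the working set stays bounded even for a 1.1 GB input.
dataset = dataset.map(tokenize, batched=True, remove_columns=["text"])
dataset.set_format(type="torch", columns=["input_ids", "attention_mask"])
```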
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/631","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/631\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/631\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/631\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/631","id":701711255,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg3MTE3OTA0","number":631,"title":"Fix text delimiter","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-09-15T08:08:42Z","updated_at":"2020-09-22T15:03:06Z","closed_at":"2020-09-15T08:26:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/631","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/631","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/631.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/631.patch","merged_at":"2020-09-15T08:26:25Z"},"body":"I changed the delimiter in the `text` dataset script.\r\nIt should fix the `pyarrow.lib.ArrowInvalid: CSV parse error` from #622 \r\n\r\nI changed the delimiter to an unused ascii character that is not present in text files : `\\b`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/631\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/631\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/630","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/630\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/630\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/630\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/630","id":701636350,"node_id":"MDU6SXNzdWU3MDE2MzYzNTA=","number":630,"title":"Text dataset not working with large 
files","user":{"login":"ksjae","id":17930170,"node_id":"MDQ6VXNlcjE3OTMwMTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17930170?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ksjae","html_url":"https:\/\/github.com\/ksjae","followers_url":"https:\/\/api.github.com\/users\/ksjae\/followers","following_url":"https:\/\/api.github.com\/users\/ksjae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ksjae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ksjae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ksjae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ksjae\/orgs","repos_url":"https:\/\/api.github.com\/users\/ksjae\/repos","events_url":"https:\/\/api.github.com\/users\/ksjae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ksjae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-09-15T06:02:36Z","updated_at":"2020-09-25T22:21:43Z","closed_at":"2020-09-25T22:21:43Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\nTraceback (most recent call last):\r\n File \"examples\/language-modeling\/run_language_modeling.py\", line 333, in \r\n main()\r\n File \"examples\/language-modeling\/run_language_modeling.py\", line 262, in main\r\n get_dataset(data_args, tokenizer=tokenizer, cache_dir=model_args.cache_dir) if training_args.do_train else None\r\n File \"examples\/language-modeling\/run_language_modeling.py\", line 144, in get_dataset\r\n dataset = load_dataset(\"text\", data_files=file_path, split='train+test')\r\n File \"\/home\/ksjae\/.local\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 611, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/home\/ksjae\/.local\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 469, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/ksjae\/.local\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 546, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/ksjae\/.local\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 888, in _prepare_split\r\n for key, table in utils.tqdm(generator, unit=\" tables\", leave=False, disable=not_verbose):\r\n File \"\/home\/ksjae\/.local\/lib\/python3.7\/site-packages\/tqdm\/std.py\", line 1129, in __iter__\r\n for obj in iterable:\r\n File \"\/home\/ksjae\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/text\/7e13bc0fa76783d4ef197f079dc8acfe54c3efda980f2c9adfab046ede2f0ff7\/text.py\", line 104, in _generate_tables\r\n convert_options=self.config.convert_options,\r\n File \"pyarrow\/_csv.pyx\", line 714, in pyarrow._csv.read_csv\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\n```\r\n\r\n**pyarrow.lib.ArrowInvalid: straddling object straddles two block boundaries (try to increase block size?)**\r\n\r\nIt gives the same message for both 200MB, 10GB .tx files but not for 700MB file.\r\nCan't upload due to size & copyright problem. 
sorry.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/630\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/630\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/629","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/629\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/629\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/629\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/629","id":701517550,"node_id":"MDU6SXNzdWU3MDE1MTc1NTA=","number":629,"title":"straddling object straddles two block boundaries","user":{"login":"bharaniabhishek123","id":17970177,"node_id":"MDQ6VXNlcjE3OTcwMTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17970177?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bharaniabhishek123","html_url":"https:\/\/github.com\/bharaniabhishek123","followers_url":"https:\/\/api.github.com\/users\/bharaniabhishek123\/followers","following_url":"https:\/\/api.github.com\/users\/bharaniabhishek123\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bharaniabhishek123\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bharaniabhishek123\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bharaniabhishek123\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bharaniabhishek123\/orgs","repos_url":"https:\/\/api.github.com\/users\/bharaniabhishek123\/repos","events_url":"https:\/\/api.github.com\/users\/bharaniabhishek123\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bharaniabhishek123\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-15T00:30:46Z","updated_at":"2020-09-15T00:36:17Z","closed_at":"2020-09-15T00:32:17Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to read json data (it's an array with lots of dictionaries) and getting block boundaries issue as below : \r\n\r\nI tried calling read_json with readOptions but no luck .\r\n\r\n```\r\ntable = json.read_json(fn)\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"pyarrow\/_json.pyx\", line 246, in pyarrow._json.read_json\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: straddling object straddles two block boundaries (try to increase block size?)\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/629\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/629\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/628","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/628\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/628\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/628\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/628","id":701496053,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg2OTQyNzgx","number":628,"title":"Update docs links in the contribution guideline","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-14T23:27:19Z","updated_at":"2020-11-02T21:03:23Z","closed_at":"2020-09-15T06:19:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/628","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/628","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/628.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/628.patch","merged_at":"2020-09-15T06:19:35Z"},"body":"Fixed the `add a dataset` and `share a dataset` links in the contribution guideline to refer to the new docs website.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/628\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/628\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/627","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/627\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/627\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/627\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/627","id":701411661,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg2ODcxMTg2","number":627,"title":"fix (#619) MLQA features 
names","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-14T20:41:59Z","updated_at":"2020-11-02T21:04:32Z","closed_at":"2020-09-16T06:54:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/627","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/627","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/627.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/627.patch","merged_at":"2020-09-16T06:54:11Z"},"body":"Fixed the features names as suggested in (#619) in the `_generate_examples` and `_info` methods in the MLQA loading script and also changed the names in the `dataset_infos.json` file.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/627\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/627\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/626","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/626\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/626\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/626\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/626","id":701352605,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg2ODIzMTY1","number":626,"title":"Update GLUE URLs (now hosted on 
FB)","user":{"login":"jeswan","id":57466294,"node_id":"MDQ6VXNlcjU3NDY2Mjk0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57466294?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jeswan","html_url":"https:\/\/github.com\/jeswan","followers_url":"https:\/\/api.github.com\/users\/jeswan\/followers","following_url":"https:\/\/api.github.com\/users\/jeswan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jeswan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jeswan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jeswan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jeswan\/orgs","repos_url":"https:\/\/api.github.com\/users\/jeswan\/repos","events_url":"https:\/\/api.github.com\/users\/jeswan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jeswan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-14T19:05:39Z","updated_at":"2020-09-16T06:53:18Z","closed_at":"2020-09-16T06:53:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/626","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/626","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/626.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/626.patch","merged_at":"2020-09-16T06:53:18Z"},"body":"NYU is switching dataset hosting from Google to FB. This PR closes https:\/\/github.com\/huggingface\/datasets\/issues\/608 and is necessary for https:\/\/github.com\/jiant-dev\/jiant\/issues\/161. 
This PR updates the data URLs based on changes made in https:\/\/github.com\/nyu-mll\/jiant\/pull\/1112.\r\n\r\nNote: rebased on huggingface\/datasets","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/626\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/626\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/625","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/625\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/625\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/625\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/625","id":701057799,"node_id":"MDU6SXNzdWU3MDEwNTc3OTk=","number":625,"title":"dtype of tensors should be preserved","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2020-09-14T12:38:05Z","updated_at":"2021-08-17T08:30:04Z","closed_at":"2021-08-17T08:30:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"After switching to `datasets` my model just broke. After a weekend of debugging, the issue was that my model could not handle the double that the Dataset provided, as it expected a float (but didn't give a warning, which seems a [PyTorch issue](https:\/\/discuss.pytorch.org\/t\/is-it-required-that-input-and-hidden-for-gru-have-the-same-dtype-float32\/96221)). \r\n\r\nAs a user I did not expect this bug. I have a `map` function that I call on the Dataset that looks like this:\r\n\r\n```python\r\ndef preprocess(sentences: List[str]):\r\n token_ids = [[vocab.to_index(t) for t in s.split()] for s in sentences]\r\n\r\n sembeddings = stransformer.encode(sentences)\r\n print(sembeddings.dtype)\r\n return {\"input_ids\": token_ids, \"sembedding\": sembeddings}\r\n```\r\n\r\nGiven a list of `sentences` (`List[str]`), it converts those into token_ids on the one hand (list of lists of ints; `List[List[int]]`) and into sentence embeddings on the other (Tensor of dtype `torch.float32`). 
That means that I actually set the column \"sembedding\" to a tensor that I as a user expect to be a float32.\r\n\r\nIt appears though that behind the scenes, this tensor is converted into a **list**. I did not find this documented anywhere but I might have missed it. From a user's perspective this is incredibly important though, because it means you cannot do any data_type or tensor casting yourself in a mapping function! Furthermore, this can lead to issues, as was my case. \r\n\r\nMy model expected float32 precision, which I thought `sembedding` was because that is what `stransformer.encode` outputs. But behind the scenes this tensor is first cast to a list, and when we then set its format, as below, this column is cast not to float32 but to double precision float64.\r\n\r\n```python\r\ndataset.set_format(type=\"torch\", columns=[\"input_ids\", \"sembedding\"])\r\n```\r\n\r\nThis happens because apparently there is an intermediate step of casting to a **numpy** array (?) **whose dtype creation\/deduction is different from torch dtypes** (see the snippet below). As you can see, this means that the dtype is not preserved: if I got it right, the dataset goes from torch.float32 -> list -> float64 (numpy) -> torch.float64. \r\n\r\n```python\r\nimport torch\r\nimport numpy as np\r\n\r\nl = [-0.03010837361216545, -0.035979013890028, -0.016949838027358055]\r\ntorch_tensor = torch.tensor(l)\r\nnp_array = np.array(l)\r\nnp_to_torch = torch.from_numpy(np_array)\r\n\r\nprint(torch_tensor.dtype)\r\n# torch.float32\r\nprint(np_array.dtype)\r\n# float64\r\nprint(np_to_torch.dtype)\r\n# torch.float64\r\n```\r\n\r\nThis might lead to unwanted behaviour. I understand that the whole library is probably built around casting from numpy to other frameworks, so this might be difficult to solve. 
Perhaps `set_format` should include a `dtypes` option where for each input column the user can specify the wanted precision.\r\n\r\nThe alternative is that the user needs to cast manually after loading data from the dataset but that does not seem user-friendly, makes the dataset less portable, and might use more space in memory as well as on disk than is actually needed.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/625\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/625\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/624","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/624\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/624\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/624\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/624","id":700541628,"node_id":"MDU6SXNzdWU3MDA1NDE2Mjg=","number":624,"title":"Add learningq dataset","user":{"login":"krrishdholakia","id":17561003,"node_id":"MDQ6VXNlcjE3NTYxMDAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17561003?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/krrishdholakia","html_url":"https:\/\/github.com\/krrishdholakia","followers_url":"https:\/\/api.github.com\/users\/krrishdholakia\/followers","following_url":"https:\/\/api.github.com\/users\/krrishdholakia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/krrishdholakia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/krrishdholakia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/krrishdholakia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/krrishdholakia\/orgs","repos_url":"https:\/\/api.github.com\/users\/krrishdholakia\/repos","events_url":"https:\/\/api.github.com\/users\/krrishdholakia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/krrishdholakia\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-13T10:20:27Z","updated_at":"2020-09-14T09:50:02Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, \r\n\r\nThank you again for this amazing repo. \r\n\r\nWould it be possible for y'all to add the LearningQ dataset - https:\/\/github.com\/AngusGLChen\/LearningQ ? 
\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/624\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/624\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/623","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/623\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/623\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/623\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/623","id":700235308,"node_id":"MDU6SXNzdWU3MDAyMzUzMDg=","number":623,"title":"Custom feature types in `load_dataset` from CSV","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-09-12T13:21:34Z","updated_at":"2020-09-30T19:51:43Z","closed_at":"2020-09-30T08:39:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to load a local file with the `load_dataset` function and I want to predefine the feature types with the `features` argument. However, the types are always the same independent of the value of `features`. \r\n\r\nI am working with the local files from the emotion dataset. 
To get the data you can use the following code:\r\n\r\n```Python\r\nfrom pathlib import Path\r\nimport wget\r\n\r\nEMOTION_PATH = Path(\".\/data\/emotion\")\r\nDOWNLOAD_URLS = [\r\n \"https:\/\/www.dropbox.com\/s\/1pzkadrvffbqw6o\/train.txt?dl=1\",\r\n \"https:\/\/www.dropbox.com\/s\/2mzialpsgf9k5l3\/val.txt?dl=1\",\r\n \"https:\/\/www.dropbox.com\/s\/ikkqxfdbdec3fuj\/test.txt?dl=1\",\r\n]\r\n\r\nif not Path.is_dir(EMOTION_PATH):\r\n Path.mkdir(EMOTION_PATH)\r\nfor url in DOWNLOAD_URLS:\r\n wget.download(url, str(EMOTION_PATH))\r\n```\r\n\r\nThe first five lines of the train set are:\r\n```\r\ni didnt feel humiliated;sadness\r\ni can go from feeling so hopeless to so damned hopeful just from being around someone who cares and is awake;sadness\r\nim grabbing a minute to post i feel greedy wrong;anger\r\ni am ever feeling nostalgic about the fireplace i will know that it is still on the property;love\r\ni am feeling grouchy;anger\r\n```\r\n\r\nHere the code to reproduce the issue:\r\n```Python\r\nfrom datasets import Features, Value, ClassLabel, load_dataset\r\n\r\nclass_names = [\"sadness\", \"joy\", \"love\", \"anger\", \"fear\", \"surprise\"]\r\nemotion_features = Features({'text': Value('string'), 'label': ClassLabel(names=class_names)})\r\nfile_dict = {'train': EMOTION_PATH\/'train.txt'}\r\n\r\ndataset = load_dataset('csv', data_files=file_dict, delimiter=';', column_names=['text', 'label'], features=emotion_features)\r\n```\r\n\r\n**Observed behaviour:**\r\n```Python\r\ndataset['train'].features\r\n```\r\n```Python\r\n{'text': Value(dtype='string', id=None),\r\n 'label': Value(dtype='string', id=None)}\r\n```\r\n**Expected behaviour:**\r\n```Python\r\ndataset['train'].features\r\n```\r\n```Python\r\n{'text': Value(dtype='string', id=None),\r\n 'label': ClassLabel(num_classes=6, names=['sadness', 'joy', 'love', 'anger', 'fear', 'surprise'], names_file=None, id=None)}\r\n```\r\n\r\n**Things I've tried:**\r\n- deleting the cache\r\n- trying other types such as `int64`\r\n\r\nAm I missing anything? 
Thanks for any pointer in the right direction.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/623\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/623\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/622","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/622\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/622\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/622\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/622","id":700225826,"node_id":"MDU6SXNzdWU3MDAyMjU4MjY=","number":622,"title":"load_dataset for text files not working","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":41,"created_at":"2020-09-12T12:49:28Z","updated_at":"2020-10-28T11:07:31Z","closed_at":"2020-10-28T11:07:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Trying the following snippet, I get different problems on Linux and Windows.\r\n\r\n\r\n```python\r\ndataset = load_dataset(\"text\", data_files=\"data.txt\")\r\n# or \r\ndataset = load_dataset(\"text\", data_files=[\"data.txt\"])\r\n```\r\n\r\n(ps [This example](https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#json-files) shows that you can use a string as input for data_files, but the signature is `Union[Dict, List]`.)\r\n\r\nThe problem on Linux is that the script crashes with a CSV error (even though it isn't a CSV file). 
On Windows the script just seems to freeze or get stuck after loading the config file.\r\n\r\nLinux stack trace:\r\n```\r\nPyTorch version 1.6.0+cu101 available.\r\nChecking \/home\/bram\/.cache\/huggingface\/datasets\/b1d50a0e74da9a7b9822cea8ff4e4f217dd892e09eb14f6274a2169e5436e2ea.30c25842cda32b0540d88b7195147decf9671ee442f4bc2fb6ad74016852978e.py for additional imports.\r\nFound main folder for dataset https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.1\/datasets\/text\/text.py at \/home\/bram\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/text\r\nFound specific version folder for dataset https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.1\/datasets\/text\/text.py at \/home\/bram\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/text\/7e13bc0fa76783d4ef197f079dc8acfe54c3efda980f2c9adfab046ede2f0ff7\r\nFound script file from https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.1\/datasets\/text\/text.py to \/home\/bram\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/text\/7e13bc0fa76783d4ef197f079dc8acfe54c3efda980f2c9adfab046ede2f0ff7\/text.py\r\nCouldn't find dataset infos file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.1\/datasets\/text\/dataset_infos.json\r\nFound metadata file for dataset https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.1\/datasets\/text\/text.py at \/home\/bram\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/text\/7e13bc0fa76783d4ef197f079dc8acfe54c3efda980f2c9adfab046ede2f0ff7\/text.json\r\nUsing custom data configuration default\r\nGenerating dataset text (\/home\/bram\/.cache\/huggingface\/datasets\/text\/default-0907112cc6cd2a38\/0.0.0\/7e13bc0fa76783d4ef197f079dc8acfe54c3efda980f2c9adfab046ede2f0ff7)\r\nDownloading and preparing dataset text\/default-0907112cc6cd2a38 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/bram\/.cache\/huggingface\/datasets\/text\/default-0907112cc6cd2a38\/0.0.0\/7e13bc0fa76783d4ef197f079dc8acfe54c3efda980f2c9adfab046ede2f0ff7...\r\nDataset not on Hf google storage. 
Downloading and preparing it from source\r\nDownloading took 0.0 min\r\nChecksum Computation took 0.0 min\r\nUnable to verify checksums.\r\nGenerating split train\r\nTraceback (most recent call last):\r\n File \"\/home\/bram\/Python\/projects\/dutch-simplification\/utils.py\", line 45, in prepare_data\r\n dataset = load_dataset(\"text\", data_files=dataset_f)\r\n File \"\/home\/bram\/.local\/share\/virtualenvs\/dutch-simplification-NcpPZtDF\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 608, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/bram\/.local\/share\/virtualenvs\/dutch-simplification-NcpPZtDF\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 468, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/bram\/.local\/share\/virtualenvs\/dutch-simplification-NcpPZtDF\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 546, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/bram\/.local\/share\/virtualenvs\/dutch-simplification-NcpPZtDF\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 888, in _prepare_split\r\n for key, table in utils.tqdm(generator, unit=\" tables\", leave=False, disable=not_verbose):\r\n File \"\/home\/bram\/.local\/share\/virtualenvs\/dutch-simplification-NcpPZtDF\/lib\/python3.8\/site-packages\/tqdm\/std.py\", line 1130, in __iter__\r\n for obj in iterable:\r\n File \"\/home\/bram\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/text\/7e13bc0fa76783d4ef197f079dc8acfe54c3efda980f2c9adfab046ede2f0ff7\/text.py\", line 100, in _generate_tables\r\n pa_table = pac.read_csv(\r\n File \"pyarrow\/_csv.pyx\", line 714, in pyarrow._csv.read_csv\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: CSV parse error: Expected 1 columns, got 2\r\n```\r\n\r\nWindows just seems to get stuck. 
Even with a tiny dataset of 10 lines, it has been stuck for 15 minutes already at this message:\r\n\r\n```\r\nChecking C:\\Users\\bramv\\.cache\\huggingface\\datasets\\b1d50a0e74da9a7b9822cea8ff4e4f217dd892e09eb14f6274a2169e5436e2ea.30c25842cda32b0540d88b7195147decf9671ee442f4bc2fb6ad74016852978e.py for additional imports.\r\nFound main folder for dataset https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.1\/datasets\/text\/text.py at C:\\Users\\bramv\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\text\r\nFound specific version folder for dataset https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.1\/datasets\/text\/text.py at C:\\Users\\bramv\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\text\\7e13bc0fa76783d4ef197f079dc8acfe54c3efda980f2c9adfab046ede2f0ff7\r\nFound script file from https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.1\/datasets\/text\/text.py to C:\\Users\\bramv\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\text\\7e13bc0fa76783d4ef197f079dc8acfe54c3efda980f2c9adfab046ede2f0ff7\\text.py\r\nCouldn't find dataset infos file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.1\/datasets\/text\\dataset_infos.json\r\nFound metadata file for dataset https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.0.1\/datasets\/text\/text.py at C:\\Users\\bramv\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\text\\7e13bc0fa76783d4ef197f079dc8acfe54c3efda980f2c9adfab046ede2f0ff7\\text.json\r\nUsing custom data configuration default\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/622\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/622\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/621","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/621\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/621\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/621\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/621","id":700171097,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg1ODQ3ODYz","number":621,"title":"[docs] Index: The native emoji looks kinda ugly in large 
size","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-12T09:48:40Z","updated_at":"2020-09-15T06:20:03Z","closed_at":"2020-09-15T06:20:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/621","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/621","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/621.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/621.patch","merged_at":"2020-09-15T06:20:02Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/621\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/621\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/620","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/620\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/620\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/620\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/620","id":699815135,"node_id":"MDU6SXNzdWU2OTk4MTUxMzU=","number":620,"title":"map\/filter multiprocessing raises errors and corrupts 
datasets","user":{"login":"timothyjlaurent","id":2000204,"node_id":"MDQ6VXNlcjIwMDAyMDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2000204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timothyjlaurent","html_url":"https:\/\/github.com\/timothyjlaurent","followers_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/followers","following_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/orgs","repos_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/repos","events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":22,"created_at":"2020-09-11T22:30:06Z","updated_at":"2020-10-08T16:31:47Z","closed_at":"2020-10-08T16:31:46Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"After upgrading to the 1.0 started seeing errors in my 
data loading script after enabling multiprocessing.\r\n\r\n```python\r\n ...\r\n ner_ds_dict = ner_ds.train_test_split(test_size=test_pct, shuffle=True, seed=seed)\r\n ner_ds_dict[\"validation\"] = ner_ds_dict[\"test\"]\r\n rel_ds_dict = rel_ds.train_test_split(test_size=test_pct, shuffle=True, seed=seed)\r\n rel_ds_dict[\"validation\"] = rel_ds_dict[\"test\"]\r\n return ner_ds_dict, rel_ds_dict\r\n```\r\n\r\nThe first train_test_split, `ner_ds`\/`ner_ds_dict`, returns a `train` and `test` split that are iterable.\r\nThe second, `rel_ds`\/`rel_ds_dict` in this case, returns a Dataset dict that has rows but if selected from or sliced into into returns an empty dictionary. eg `rel_ds_dict['train'][0] == {}` and `rel_ds_dict['train'][0:100] == {}`.\r\n\r\nOk I think I know the problem -- the rel_ds was mapped though a mapper with `num_proc=12`. If I remove `num_proc`. The dataset loads.\r\n\r\nI also see errors with other map and filter functions when `num_proc` is set.\r\n\r\n```\r\nDone writing 67 indices in 536 bytes .\r\nDone writing 67 indices in 536 bytes .\r\nFatal Python error: PyCOND_WAIT(gil_cond) failed\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/620\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/620\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/619","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/619\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/619\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/619\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/619","id":699733612,"node_id":"MDU6SXNzdWU2OTk3MzM2MTI=","number":619,"title":"Mistakes in MLQA features names","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-11T20:46:23Z","updated_at":"2020-09-16T06:59:19Z","closed_at":"2020-09-16T06:59:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I think the following features in MLQA shouldn't be named the way they are:\r\n1. `questions` (should be `question`)\r\n2. `ids` (should be `id`)\r\n3. 
`start` (should be `answer_start`)\r\n\r\nThe reasons I'm suggesting these features be renamed are:\r\n* To make them consistent with other QA datasets like SQuAD, XQuAD, TyDiQA etc. and hence make it easier to concatenate multiple QA datasets.\r\n* The features names are not the same as the ones provided in the original MLQA datasets (it uses the names I suggested).\r\n\r\nI know these columns can be renamed using using `Dataset.rename_column_`, `questions` and `ids` can be easily renamed but `start` on the other hand is annoying to rename since it's nested inside the feature `answers`.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/619\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/619\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/618","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/618\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/618\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/618\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/618","id":699684831,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg1NDAxMzI5","number":618,"title":"sync logging utils with transformers","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2020-09-11T19:46:13Z","updated_at":"2020-09-17T15:40:59Z","closed_at":"2020-09-17T09:53:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/618","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/618","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/618.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/618.patch","merged_at":null},"body":"sync the docs\/code with the recent changes in transformers' `logging` utils:\r\n1. change the default level to `WARNING`\r\n2. add `DATASETS_VERBOSITY` env var\r\n3. 
expand docs","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/618\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/618\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/617","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/617\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/617\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/617\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/617","id":699472596,"node_id":"MDU6SXNzdWU2OTk0NzI1OTY=","number":617,"title":"Compare different Rouge implementations ","user":{"login":"ibeltagy","id":2287797,"node_id":"MDQ6VXNlcjIyODc3OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2287797?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ibeltagy","html_url":"https:\/\/github.com\/ibeltagy","followers_url":"https:\/\/api.github.com\/users\/ibeltagy\/followers","following_url":"https:\/\/api.github.com\/users\/ibeltagy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ibeltagy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ibeltagy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ibeltagy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ibeltagy\/orgs","repos_url":"https:\/\/api.github.com\/users\/ibeltagy\/repos","events_url":"https:\/\/api.github.com\/users\/ibeltagy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ibeltagy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-09-11T15:49:32Z","updated_at":"2021-03-31T17:28:33Z","closed_at":"2020-10-02T09:52:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I used RougeL implementation provided in `datasets` [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/metrics\/rouge\/rouge.py) and it gives numbers that match those reported in the pegasus paper but very different from those reported in other papers, [this](https:\/\/arxiv.org\/pdf\/1909.03186.pdf) for example.\r\nCan you make sure the google-research implementation you are using matches the official perl implementation? \r\nThere are a couple of python wrappers around the perl implementation, [this](https:\/\/pypi.org\/project\/pyrouge\/) has been commonly used, and [this](https:\/\/github.com\/pltrdy\/files2rouge) is used in fairseq). \r\nThere's also a python reimplementation [here](https:\/\/github.com\/pltrdy\/rouge) but its RougeL numbers are way off. 
\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/617\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/617\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/616","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/616\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/616\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/616\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/616","id":699462293,"node_id":"MDU6SXNzdWU2OTk0NjIyOTM=","number":616,"title":"UserWarning: The given NumPy array is not writeable, and PyTorch does not support non-writeable tensors","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":14,"created_at":"2020-09-11T15:39:16Z","updated_at":"2021-07-22T21:12:21Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying out the library and want to load in pickled data with `from_dict`. In that dict, one column `text` should be tokenized and the other (an embedding vector) should be retained. All other columns should be removed. When I eventually try to set the format for the columns with `set_format` I am getting this strange Userwarning without a stack trace:\r\n\r\n> Set __getitem__(key) output type to torch for ['input_ids', 'sembedding'] columns (when key is int or slice) and don't output other (un-formatted) columns.\r\n> C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\datasets\\arrow_dataset.py:835: UserWarning: The given NumPy array is not writeable, and PyTorch does not support non-writeable tensors. This means you can write to the underlying (supposedly non-writeable) NumPy array using the tensor. You may want to copy the array to protect its data or make it writeable before converting it to a tensor. This type of warning will be suppressed for the rest of this program. (Triggered internally at ..\\torch\\csrc\\utils\\tensor_numpy.cpp:141.)\r\n> return torch.tensor(x, **format_kwargs)\r\n\r\nThe first one might not be related to the warning, but it is odd that it is shown, too. 
It is unclear whether that is something that I should do or something that that the program is doing at that moment.\r\n\r\nSnippet:\r\n```\r\n dataset = Dataset.from_dict(torch.load(\"data\/dummy.pt.pt\"))\r\n print(dataset)\r\n tokenizer = AutoTokenizer.from_pretrained(\"bert-base-cased\")\r\n keys_to_retain = {\"input_ids\", \"sembedding\"}\r\n dataset = dataset.map(lambda example: tokenizer(example[\"text\"], padding='max_length'), batched=True)\r\n dataset.remove_columns_(set(dataset.column_names) - keys_to_retain)\r\n\r\n dataset.set_format(type=\"torch\", columns=[\"input_ids\", \"sembedding\"])\r\n dataloader = torch.utils.data.DataLoader(dataset, batch_size=2)\r\n\r\n print(next(iter(dataloader)))\r\n```\r\n\r\nPS: the input type for `remove_columns_` should probably be an Iterable rather than just a List.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/616\/reactions","total_count":4,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":4},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/616\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/615","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/615\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/615\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/615\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/615","id":699410773,"node_id":"MDU6SXNzdWU2OTk0MTA3NzM=","number":615,"title":"Offset overflow when slicing a big dataset with an array of indices in Pyarrow >= 1.0.0","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-11T14:50:38Z","updated_at":"2020-09-19T16:47:40Z","closed_at":"2020-09-19T16:46:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"How to reproduce:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nwiki = load_dataset(\"wikipedia\", \"20200501.en\", split=\"train\")\r\nwiki[[0]]\r\n\r\n---------------------------------------------------------------------------\r\nArrowInvalid Traceback (most recent call last)\r\n in \r\n----> 1 wikipedia[[0]]\r\n\r\n~\/Desktop\/hf\/nlp\/src\/datasets\/arrow_dataset.py in __getitem__(self, key)\r\n 1069 
format_columns=self._format_columns,\r\n 1070 output_all_columns=self._output_all_columns,\r\n-> 1071 format_kwargs=self._format_kwargs,\r\n 1072 )\r\n 1073 \r\n\r\n~\/Desktop\/hf\/nlp\/src\/datasets\/arrow_dataset.py in _getitem(self, key, format_type, format_columns, output_all_columns, format_kwargs)\r\n 1037 )\r\n 1038 else:\r\n-> 1039 data_subset = self._data.take(indices_array)\r\n 1040 \r\n 1041 if format_type is not None:\r\n\r\n~\/.virtualenvs\/hf-datasets\/lib\/python3.7\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.take()\r\n\r\n~\/.virtualenvs\/hf-datasets\/lib\/python3.7\/site-packages\/pyarrow\/compute.py in take(data, indices, boundscheck)\r\n 266 \"\"\"\r\n 267 options = TakeOptions(boundscheck)\r\n--> 268 return call_function('take', [data, indices], options)\r\n 269 \r\n 270 \r\n\r\n~\/.virtualenvs\/hf-datasets\/lib\/python3.7\/site-packages\/pyarrow\/_compute.pyx in pyarrow._compute.call_function()\r\n\r\n~\/.virtualenvs\/hf-datasets\/lib\/python3.7\/site-packages\/pyarrow\/_compute.pyx in pyarrow._compute.Function.call()\r\n\r\n~\/.virtualenvs\/hf-datasets\/lib\/python3.7\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/.virtualenvs\/hf-datasets\/lib\/python3.7\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowInvalid: offset overflow while concatenating arrays\r\n```\r\n\r\nIt seems to work fine with small datasets or with pyarrow 0.17.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/615\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/615\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/614","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/614\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/614\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/614\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/614","id":699177110,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg0OTQ2MzA1","number":614,"title":"[doc] Update 
deploy.sh","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-11T11:06:13Z","updated_at":"2020-09-14T08:49:19Z","closed_at":"2020-09-14T08:49:17Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/614","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/614","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/614.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/614.patch","merged_at":"2020-09-14T08:49:17Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/614\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/614\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/613","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/613\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/613\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/613\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/613","id":699117070,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg0ODkyMTUx","number":613,"title":"Add CoNLL-2003 shared task 
dataset","user":{"login":"vblagoje","id":458335,"node_id":"MDQ6VXNlcjQ1ODMzNQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/458335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vblagoje","html_url":"https:\/\/github.com\/vblagoje","followers_url":"https:\/\/api.github.com\/users\/vblagoje\/followers","following_url":"https:\/\/api.github.com\/users\/vblagoje\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vblagoje\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vblagoje\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vblagoje\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vblagoje\/orgs","repos_url":"https:\/\/api.github.com\/users\/vblagoje\/repos","events_url":"https:\/\/api.github.com\/users\/vblagoje\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vblagoje\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-09-11T10:02:30Z","updated_at":"2020-10-05T10:43:05Z","closed_at":"2020-09-17T10:36:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/613","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/613","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/613.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/613.patch","merged_at":"2020-09-17T10:36:38Z"},"body":"Please consider adding CoNLL-2003 shared task dataset as it's beneficial for token classification tasks. The motivation behind this PR is the [PR](https:\/\/github.com\/huggingface\/transformers\/pull\/7041) in the transformers project. This dataset would be not only useful for the usual run-of-the-mill NER tasks but also for syntactic chunking and part-of-speech (POS) tagging. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/613\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/613\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/612","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/612\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/612\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/612\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/612","id":699008644,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg0Nzk2Mjg5","number":612,"title":"add multi-proc to dataset dict","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-11T08:18:13Z","updated_at":"2020-09-11T10:20:13Z","closed_at":"2020-09-11T10:20:11Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/612","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/612","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/612.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/612.patch","merged_at":"2020-09-11T10:20:11Z"},"body":"Add multi-proc to `DatasetDict`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/612\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/612\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/611","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/611\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/611\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/611\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/611","id":698863988,"node_id":"MDU6SXNzdWU2OTg4NjM5ODg=","number":611,"title":"ArrowCapacityError: List array cannot contain more than 2147483646 
child elements, have 2147483648","user":{"login":"sangyx","id":32364921,"node_id":"MDQ6VXNlcjMyMzY0OTIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32364921?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sangyx","html_url":"https:\/\/github.com\/sangyx","followers_url":"https:\/\/api.github.com\/users\/sangyx\/followers","following_url":"https:\/\/api.github.com\/users\/sangyx\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sangyx\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sangyx\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sangyx\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sangyx\/orgs","repos_url":"https:\/\/api.github.com\/users\/sangyx\/repos","events_url":"https:\/\/api.github.com\/users\/sangyx\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sangyx\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-09-11T05:29:12Z","updated_at":"2020-09-25T15:14:55Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I'm trying to load a dataset from Dataframe, but I get the error:\r\n```bash\r\n---------------------------------------------------------------------------\r\nArrowCapacityError Traceback (most recent call last)\r\n in \r\n----> 1 dataset = Dataset.from_pandas(emb)\r\n\r\n~\/miniconda3\/envs\/dev\/lib\/python3.7\/site-packages\/nlp\/arrow_dataset.py in from_pandas(cls, df, features, info, split)\r\n 223 info.features = features\r\n 224 pa_table: pa.Table = pa.Table.from_pandas(\r\n--> 225 df=df, schema=pa.schema(features.type) if features is not None else None\r\n 226 )\r\n 227 return cls(pa_table, info=info, split=split)\r\n\r\n~\/miniconda3\/envs\/dev\/lib\/python3.7\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.from_pandas()\r\n\r\n~\/miniconda3\/envs\/dev\/lib\/python3.7\/site-packages\/pyarrow\/pandas_compat.py in dataframe_to_arrays(df, schema, preserve_index, nthreads, columns, safe)\r\n 591 for i, maybe_fut in enumerate(arrays):\r\n 592 if isinstance(maybe_fut, futures.Future):\r\n--> 593 arrays[i] = maybe_fut.result()\r\n 594 \r\n 595 types = [x.type for x in arrays]\r\n\r\n~\/miniconda3\/envs\/dev\/lib\/python3.7\/concurrent\/futures\/_base.py in result(self, timeout)\r\n 426 raise CancelledError()\r\n 427 elif self._state == FINISHED:\r\n--> 428 return self.__get_result()\r\n 429 \r\n 430 self._condition.wait(timeout)\r\n\r\n~\/miniconda3\/envs\/dev\/lib\/python3.7\/concurrent\/futures\/_base.py in __get_result(self)\r\n 382 def __get_result(self):\r\n 383 if self._exception:\r\n--> 384 raise self._exception\r\n 385 else:\r\n 386 return self._result\r\n\r\n~\/miniconda3\/envs\/dev\/lib\/python3.7\/concurrent\/futures\/thread.py in run(self)\r\n 55 \r\n 56 try:\r\n---> 57 result = self.fn(*self.args, **self.kwargs)\r\n 58 except BaseException as exc:\r\n 59 self.future.set_exception(exc)\r\n\r\n~\/miniconda3\/envs\/dev\/lib\/python3.7\/site-packages\/pyarrow\/pandas_compat.py in convert_column(col, field)\r\n 557 \r\n 558 try:\r\n--> 559 result = pa.array(col, type=type_, from_pandas=True, safe=safe)\r\n 560 except (pa.ArrowInvalid,\r\n 561 pa.ArrowNotImplementedError,\r\n\r\n~\/miniconda3\/envs\/dev\/lib\/python3.7\/site-packages\/pyarrow\/array.pxi in 
pyarrow.lib.array()\r\n\r\n~\/miniconda3\/envs\/dev\/lib\/python3.7\/site-packages\/pyarrow\/array.pxi in pyarrow.lib._ndarray_to_array()\r\n\r\n~\/miniconda3\/envs\/dev\/lib\/python3.7\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowCapacityError: List array cannot contain more than 2147483646 child elements, have 2147483648\r\n```\r\nMy code is :\r\n```python\r\nfrom nlp import Dataset\r\ndataset = Dataset.from_pandas(emb)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/611\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/611\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/610","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/610\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/610\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/610\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/610","id":698349388,"node_id":"MDU6SXNzdWU2OTgzNDkzODg=","number":610,"title":"Load text file for RoBERTa pre-training. ","user":{"login":"chiyuzhang94","id":33407613,"node_id":"MDQ6VXNlcjMzNDA3NjEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33407613?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chiyuzhang94","html_url":"https:\/\/github.com\/chiyuzhang94","followers_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/followers","following_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/orgs","repos_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/repos","events_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chiyuzhang94\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":42,"created_at":"2020-09-10T18:41:38Z","updated_at":"2021-04-10T08:44:04Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I migrate my question from https:\/\/github.com\/huggingface\/transformers\/pull\/4009#issuecomment-690039444\r\n\r\nI tried to train a Roberta from scratch using transformers. But I got OOM issues with loading a large text file. \r\nAccording to the suggestion from @thomwolf , I tried to implement `datasets` to load my text file. 
This test.txt is a simple sample where each line is a sentence.\r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('text', data_files='test.txt',cache_dir=\".\/\")\r\ndataset.set_format(type='torch',columns=[\"text\"])\r\ndataloader = torch.utils.data.DataLoader(dataset, batch_size=8)\r\nnext(iter(dataloader))\r\n```\r\n\r\nBut dataload cannot yield sample and error is:\r\n```\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n in \r\n----> 1 next(iter(dataloader))\r\n\r\n\/Library\/Python\/3.7\/site-packages\/torch\/utils\/data\/dataloader.py in __next__(self)\r\n 361 \r\n 362 def __next__(self):\r\n--> 363 data = self._next_data()\r\n 364 self._num_yielded += 1\r\n 365 if self._dataset_kind == _DatasetKind.Iterable and \\\r\n\r\n\/Library\/Python\/3.7\/site-packages\/torch\/utils\/data\/dataloader.py in _next_data(self)\r\n 401 def _next_data(self):\r\n 402 index = self._next_index() # may raise StopIteration\r\n--> 403 data = self._dataset_fetcher.fetch(index) # may raise StopIteration\r\n 404 if self._pin_memory:\r\n 405 data = _utils.pin_memory.pin_memory(data)\r\n\r\n\/Library\/Python\/3.7\/site-packages\/torch\/utils\/data\/_utils\/fetch.py in fetch(self, possibly_batched_index)\r\n 42 def fetch(self, possibly_batched_index):\r\n 43 if self.auto_collation:\r\n---> 44 data = [self.dataset[idx] for idx in possibly_batched_index]\r\n 45 else:\r\n 46 data = self.dataset[possibly_batched_index]\r\n\r\n\/Library\/Python\/3.7\/site-packages\/torch\/utils\/data\/_utils\/fetch.py in (.0)\r\n 42 def fetch(self, possibly_batched_index):\r\n 43 if self.auto_collation:\r\n---> 44 data = [self.dataset[idx] for idx in possibly_batched_index]\r\n 45 else:\r\n 46 data = self.dataset[possibly_batched_index]\r\n\r\nKeyError: 0\r\n```\r\n\r\n`dataset.set_format(type='torch',columns=[\"text\"])` returns a log says:\r\n```\r\nSet __getitem__(key) output type to torch for ['text'] columns (when key is int or slice) and don't output other (un-formatted) columns.\r\n```\r\n\r\nI noticed the dataset is `DatasetDict({'train': Dataset(features: {'text': Value(dtype='string', id=None)}, num_rows: 44)})`.\r\nEach sample can be accessed by `dataset[\"train\"][\"text\"]` instead of `dataset[\"text\"]`. 
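Building on that last observation, a minimal sketch of one possible adjustment follows: index into the `train` split before wrapping it in a DataLoader, and tokenize so that the torch-formatted columns are fixed-size tensors. This is only an illustration derived from the error above; the tokenizer choice, column names and sequence length are assumptions.

```python
# Hedged sketch: hand the "train" split (a Dataset) to the DataLoader instead of
# the DatasetDict, and tokenize so the torch-formatted columns collate cleanly.
import torch
from datasets import load_dataset
from transformers import AutoTokenizer  # assumption: any RoBERTa-compatible tokenizer

dataset = load_dataset("text", data_files="test.txt")["train"]
tokenizer = AutoTokenizer.from_pretrained("roberta-base")

dataset = dataset.map(
    lambda batch: tokenizer(
        batch["text"], truncation=True, padding="max_length", max_length=128
    ),
    batched=True,
)
dataset.set_format(type="torch", columns=["input_ids", "attention_mask"])

dataloader = torch.utils.data.DataLoader(dataset, batch_size=8)
print(next(iter(dataloader))["input_ids"].shape)  # e.g. torch.Size([8, 128])
```

The key point is that `load_dataset` returns a `DatasetDict` keyed by split, so the split itself is what should be formatted and iterated.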
\r\n\r\nCould you please give me any suggestions on how to modify this code to load the text file?\r\n\r\nVersions:\r\nPython version 3.7.3\r\nPyTorch version 1.6.0 \r\nTensorFlow version 2.3.0 \r\ndatasets version: 1.0.1","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/610\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/610\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/609","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/609\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/609\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/609\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/609","id":698323989,"node_id":"MDExOlB1bGxSZXF1ZXN0NDg0MTc4Nzky","number":609,"title":"Update GLUE URLs (now hosted on FB)","user":{"login":"jeswan","id":57466294,"node_id":"MDQ6VXNlcjU3NDY2Mjk0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57466294?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jeswan","html_url":"https:\/\/github.com\/jeswan","followers_url":"https:\/\/api.github.com\/users\/jeswan\/followers","following_url":"https:\/\/api.github.com\/users\/jeswan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jeswan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jeswan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jeswan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jeswan\/orgs","repos_url":"https:\/\/api.github.com\/users\/jeswan\/repos","events_url":"https:\/\/api.github.com\/users\/jeswan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jeswan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-10T18:16:32Z","updated_at":"2020-09-14T19:06:02Z","closed_at":"2020-09-14T19:06:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/609","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/609","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/609.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/609.patch","merged_at":null},"body":"NYU is switching dataset hosting from Google to FB. This PR closes https:\/\/github.com\/huggingface\/datasets\/issues\/608 and is necessary for https:\/\/github.com\/jiant-dev\/jiant\/issues\/161. 
This PR updates the data URLs based on changes made in https:\/\/github.com\/nyu-mll\/jiant\/pull\/1112.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/609\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/609\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/608","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/608\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/608\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/608\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/608","id":698291156,"node_id":"MDU6SXNzdWU2OTgyOTExNTY=","number":608,"title":"Don't use the old NYU GLUE dataset URLs","user":{"login":"jeswan","id":57466294,"node_id":"MDQ6VXNlcjU3NDY2Mjk0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57466294?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jeswan","html_url":"https:\/\/github.com\/jeswan","followers_url":"https:\/\/api.github.com\/users\/jeswan\/followers","following_url":"https:\/\/api.github.com\/users\/jeswan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jeswan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jeswan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jeswan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jeswan\/orgs","repos_url":"https:\/\/api.github.com\/users\/jeswan\/repos","events_url":"https:\/\/api.github.com\/users\/jeswan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jeswan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-10T17:47:02Z","updated_at":"2020-09-16T06:53:18Z","closed_at":"2020-09-16T06:53:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"NYU is switching dataset hosting from Google to FB. Initial changes to `datasets` are in https:\/\/github.com\/jeswan\/nlp\/commit\/b7d4a071d432592ded971e30ef73330529de25ce. 
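As a quick sanity check for a hosting change like the one described above, one can simply reload an affected configuration and confirm the download still resolves. The snippet below is a hedged sketch; `glue`/`mrpc` is only an illustrative config, since the exact configs touched are not listed here.

```python
# Hedged sketch: re-load one GLUE configuration to confirm the updated
# download URLs resolve; the choice of "mrpc" is purely illustrative.
from datasets import load_dataset

glue_mrpc = load_dataset("glue", "mrpc")
print(glue_mrpc)               # splits and row counts
print(glue_mrpc["train"][0])   # one example, to confirm the data parsed
```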
What tests do you suggest I run before opening a PR?\r\n\r\nSee: https:\/\/github.com\/jiant-dev\/jiant\/issues\/161 and https:\/\/github.com\/nyu-mll\/jiant\/pull\/1112","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/608\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/608\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/607","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/607\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/607\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/607\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/607","id":698094442,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgzOTcyMDg4","number":607,"title":"Add transmit_format wrapper and tests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-10T15:03:50Z","updated_at":"2020-09-10T15:21:48Z","closed_at":"2020-09-10T15:21:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/607","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/607","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/607.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/607.patch","merged_at":"2020-09-10T15:21:47Z"},"body":"Same as #605 but using a decorator on-top of dataset transforms that are not in place","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/607\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/607\/timeline","performed_via_github_app":null} 
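To make the decorator idea concrete, here is a rough, hedged sketch of the pattern: capture the caller's current format and re-apply it to the dataset returned by a non-in-place transform. This is not the library's actual implementation, only an outline of the mechanism the PR describes.

```python
# Hedged sketch of a format-transmitting decorator; not the real implementation.
# It assumes the wrapped object exposes a `format` dict and a `set_format`
# method, as datasets.Dataset does.
import functools

def transmit_format(method):
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        # Remember how the parent dataset is currently formatted.
        saved = {
            "type": self.format["type"],
            "columns": self.format["columns"],
            "output_all_columns": self.format["output_all_columns"],
        }
        out = method(self, *args, **kwargs)
        # Re-apply that format to the new dataset produced by the transform.
        out.set_format(**saved)
        return out
    return wrapper
```

The real version has to handle more edge cases (for example, columns added or removed by the transform), so treat this purely as a mental model.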
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/606","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/606\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/606\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/606\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/606","id":698050442,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgzOTMzMDA1","number":606,"title":"Quick fix :)","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-10T14:32:06Z","updated_at":"2020-09-10T16:18:32Z","closed_at":"2020-09-10T16:18:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/606","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/606","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/606.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/606.patch","merged_at":"2020-09-10T16:18:30Z"},"body":"`nlp` => `datasets`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/606\/reactions","total_count":3,"+1":0,"-1":0,"laugh":1,"hooray":1,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/606\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/605","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/605\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/605\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/605\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/605","id":697887401,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgzNzg1Mjc1","number":605,"title":"[Datasets] Transmit format to 
children","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-10T12:30:18Z","updated_at":"2020-09-10T16:15:21Z","closed_at":"2020-09-10T16:15:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/605","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/605","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/605.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/605.patch","merged_at":null},"body":"Transmit format to children obtained when processing a dataset.\r\n\r\nAdded a test.\r\n\r\nWhen concatenating datasets, if the formats are disparate, the concatenated dataset has a format reset to defaults.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/605\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/605\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/604","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/604\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/604\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/604\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/604","id":697774581,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgzNjgxNTc0","number":604,"title":"Update bucket 
prefix","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-10T11:01:13Z","updated_at":"2020-09-10T12:45:33Z","closed_at":"2020-09-10T12:45:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/604","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/604","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/604.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/604.patch","merged_at":"2020-09-10T12:45:32Z"},"body":"cc @julien-c ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/604\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/604\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/603","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/603\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/603\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/603\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/603","id":697758750,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgzNjY2ODk5","number":603,"title":"Set scripts version to 
master","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-10T10:47:44Z","updated_at":"2020-09-10T11:02:05Z","closed_at":"2020-09-10T11:02:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/603","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/603","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/603.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/603.patch","merged_at":"2020-09-10T11:02:04Z"},"body":"By default the scripts version is master, so that if the library is installed with \r\n```\r\npip install git+http:\/\/github.com\/huggingface\/nlp.git\r\n```\r\nor\r\n```\r\ngit clone http:\/\/github.com\/huggingface\/nlp.git\r\npip install -e .\/nlp\r\n```\r\n\r\nwill use the latest scripts, and not the ones from the previous version.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/603\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/603\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/602","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/602\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/602\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/602\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/602","id":697636605,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgzNTU3NDM0","number":602,"title":"apply offset to indices in multiprocessed 
map","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-10T08:54:30Z","updated_at":"2020-09-10T11:03:39Z","closed_at":"2020-09-10T11:03:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/602","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/602","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/602.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/602.patch","merged_at":"2020-09-10T11:03:37Z"},"body":"Fix #597 \r\n\r\nI fixed the indices by applying an offset.\r\nI added the case to our tests to make sure it doesn't happen again.\r\n\r\nI also added the message proposed by @thomwolf in #597 \r\n\r\n```python\r\n>>> d.select(range(10)).map(fn, with_indices=True, batched=True, num_proc=2, load_from_cache_file=False)\r\nDone writing 10 indices in 80 bytes .\r\nTesting the mapped function outputs\r\n[0, 1]\r\nTesting finished, running the mapping function on the dataset\r\nDone writing 5 indices in 41 bytes .\r\nDone writing 5 indices in 41 bytes .\r\nSpawning 2 processes\r\n[0, 1, 2, 3, 4]\r\n[5, 6, 7, 8, 9]\r\n#0: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 377.90ba\/s]\r\n#1: 
100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 378.92ba\/s]\r\nConcatenating 2 shards from multiprocessing\r\n\r\n# Dataset(features: {'label': ClassLabel(num_classes=2, names=['neg', 'pos'], names_file=None, id=None), 'text': Value(dtype='string', id=None)}, num_rows: 10)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/602\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/602\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/601","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/601\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/601\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/601\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/601","id":697574848,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgzNTAzMjAw","number":601,"title":"check if trasnformers has 
PreTrainedTokenizerBase","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-10T07:54:56Z","updated_at":"2020-09-10T11:01:37Z","closed_at":"2020-09-10T11:01:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/601","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/601","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/601.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/601.patch","merged_at":"2020-09-10T11:01:36Z"},"body":"Fix #598 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/601\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/601\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/600","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/600\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/600\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/600\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/600","id":697496913,"node_id":"MDU6SXNzdWU2OTc0OTY5MTM=","number":600,"title":"Pickling error when loading 
dataset","user":{"login":"kandorm","id":17310286,"node_id":"MDQ6VXNlcjE3MzEwMjg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17310286?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kandorm","html_url":"https:\/\/github.com\/kandorm","followers_url":"https:\/\/api.github.com\/users\/kandorm\/followers","following_url":"https:\/\/api.github.com\/users\/kandorm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kandorm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kandorm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kandorm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kandorm\/orgs","repos_url":"https:\/\/api.github.com\/users\/kandorm\/repos","events_url":"https:\/\/api.github.com\/users\/kandorm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kandorm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-09-10T06:28:08Z","updated_at":"2020-09-25T14:31:54Z","closed_at":"2020-09-25T14:31:54Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI modified line 136 in the original [run_language_modeling.py](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/language-modeling\/run_language_modeling.py) as:\r\n\r\n```\r\n# line 136: return LineByLineTextDataset(tokenizer=tokenizer, file_path=file_path, block_size=args.block_size)\r\ndataset = load_dataset(\"text\", data_files=file_path, split=\"train\")\r\ndataset = dataset.map(lambda ex: tokenizer(ex[\"text\"], add_special_tokens=True,\r\n truncation=True, max_length=args.block_size), batched=True)\r\ndataset.set_format(type='torch', columns=['input_ids'])\r\nreturn dataset\r\n```\r\n\r\nWhen I run this with transformers (3.1.0) and nlp (0.4.0), I get the following error:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"src\/run_language_modeling.py\", line 319, in \r\n main()\r\n File \"src\/run_language_modeling.py\", line 248, in main\r\n get_dataset(data_args, tokenizer=tokenizer, cache_dir=model_args.cache_dir) if training_args.do_train else None\r\n File \"src\/run_language_modeling.py\", line 139, in get_dataset\r\n dataset = dataset.map(lambda ex: tokenizer(ex[\"text\"], add_special_tokens=True, truncation=True, max_length=args.block_size), batched=True)\r\n File \"\/data\/nlp\/src\/nlp\/arrow_dataset.py\", line 1136, in map\r\n new_fingerprint=new_fingerprint,\r\n File \"\/data\/nlp\/src\/nlp\/fingerprint.py\", line 158, in wrapper\r\n self._fingerprint, transform, kwargs_for_fingerprint\r\n File \"\/data\/nlp\/src\/nlp\/fingerprint.py\", line 105, in update_fingerprint\r\n hasher.update(transform_args[key])\r\n File \"\/data\/nlp\/src\/nlp\/fingerprint.py\", line 57, in update\r\n self.m.update(self.hash(value).encode(\"utf-8\"))\r\n File \"\/data\/nlp\/src\/nlp\/fingerprint.py\", line 53, in hash\r\n return cls.hash_default(value)\r\n File \"\/data\/nlp\/src\/nlp\/fingerprint.py\", line 46, in hash_default\r\n return cls.hash_bytes(dumps(value))\r\n File \"\/data\/nlp\/src\/nlp\/utils\/py_utils.py\", line 362, in dumps\r\n dump(obj, file)\r\n File \"\/data\/nlp\/src\/nlp\/utils\/py_utils.py\", line 339, in dump\r\n Pickler(file, recurse=True).dump(obj)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 446, in dump\r\n 
StockPickler.dump(self, obj)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 409, in dump\r\n self.save(obj)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 1438, in save_function\r\n obj.__dict__, fkwdefaults), obj=obj)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 610, in save_reduce\r\n save(args)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 751, in save_tuple\r\n save(element)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 736, in save_tuple\r\n save(element)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 1170, in save_cell\r\n pickler.save_reduce(_create_cell, (f,), obj=obj)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 610, in save_reduce\r\n save(args)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 736, in save_tuple\r\n save(element)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 521, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 605, in save_reduce\r\n save(cls)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 1365, in save_type\r\n obj.__bases__, _dict), obj=obj)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 610, in save_reduce\r\n save(args)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 751, in save_tuple\r\n save(element)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 933, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 821, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 847, in _batch_setitems\r\n save(v)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 476, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/site-packages\/dill\/_dill.py\", line 933, in save_module_dict\r\n 
StockPickler.save_dict(pickler, obj)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 821, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 847, in _batch_setitems\r\n save(v)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 507, in save\r\n self.save_global(obj, rv)\r\n File \"\/root\/miniconda3\/envs\/py3.6\/lib\/python3.6\/pickle.py\", line 927, in save_global\r\n (obj, module_name, name))\r\n_pickle.PicklingError: Can't pickle typing.Union[str, NoneType]: it's not the same object as typing.Union\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/600\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/600\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/599","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/599\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/599\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/599\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/599","id":697377786,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgzMzI3ODQ5","number":599,"title":"Add MATINF dataset","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-10T03:31:09Z","updated_at":"2020-09-17T12:17:25Z","closed_at":"2020-09-17T12:17:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/599","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/599","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/599.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/599.patch","merged_at":null},"body":"@lhoestq The command to create metadata failed. I guess it's because the zip is not downloaded from a remote address? How to solve that? 
Also the CI fails and I don't know how to fix that :(","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/599\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/599\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/598","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/598\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/598\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/598\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/598","id":697156501,"node_id":"MDU6SXNzdWU2OTcxNTY1MDE=","number":598,"title":"The current version of the package on github has an error when loading dataset","user":{"login":"zeyuyun1","id":43428393,"node_id":"MDQ6VXNlcjQzNDI4Mzkz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43428393?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zeyuyun1","html_url":"https:\/\/github.com\/zeyuyun1","followers_url":"https:\/\/api.github.com\/users\/zeyuyun1\/followers","following_url":"https:\/\/api.github.com\/users\/zeyuyun1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zeyuyun1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zeyuyun1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zeyuyun1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zeyuyun1\/orgs","repos_url":"https:\/\/api.github.com\/users\/zeyuyun1\/repos","events_url":"https:\/\/api.github.com\/users\/zeyuyun1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zeyuyun1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-09-09T21:03:23Z","updated_at":"2020-09-10T06:25:21Z","closed_at":"2020-09-09T22:57:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Instead of downloading the package from pip, downloading the version from source will result in an error when loading dataset (the pip version is completely fine):\r\n\r\nTo recreate the error: \r\nFirst, installing nlp directly from source:\r\n```\r\ngit clone https:\/\/github.com\/huggingface\/nlp.git\r\ncd nlp\r\npip install -e .\r\n```\r\nThen run:\r\n```\r\nfrom nlp import load_dataset\r\ndataset = load_dataset('wikitext', 'wikitext-2-v1',split = 'train') \r\n```\r\nwill give error:\r\n\r\n```\r\n>>> dataset = load_dataset('wikitext', 'wikitext-2-v1',split = 'train')\r\nChecking \/home\/zeyuy\/.cache\/huggingface\/datasets\/84a754b488511b109e2904672d809c041008416ae74e38f9ee0c80a8dffa1383.2e21f48d63b5572d19c97e441fbb802257cf6a4c03fbc5ed8fae3d2c2273f59e.py for additional imports.\r\nFound main folder for dataset https:\/\/raw.githubusercontent.com\/huggingface\/nlp\/0.4.0\/datasets\/wikitext\/wikitext.py at \/home\/zeyuy\/.cache\/huggingface\/modules\/nlp_modules\/datasets\/wikitext\r\nFound specific version folder for dataset https:\/\/raw.githubusercontent.com\/huggingface\/nlp\/0.4.0\/datasets\/wikitext\/wikitext.py at 
\/home\/zeyuy\/.cache\/huggingface\/modules\/nlp_modules\/datasets\/wikitext\/5de6e79516446f747fcccc09aa2614fa159053b75909594d28d262395f72d89d\r\nFound script file from https:\/\/raw.githubusercontent.com\/huggingface\/nlp\/0.4.0\/datasets\/wikitext\/wikitext.py to \/home\/zeyuy\/.cache\/huggingface\/modules\/nlp_modules\/datasets\/wikitext\/5de6e79516446f747fcccc09aa2614fa159053b75909594d28d262395f72d89d\/wikitext.py\r\nFound dataset infos file from https:\/\/raw.githubusercontent.com\/huggingface\/nlp\/0.4.0\/datasets\/wikitext\/dataset_infos.json to \/home\/zeyuy\/.cache\/huggingface\/modules\/nlp_modules\/datasets\/wikitext\/5de6e79516446f747fcccc09aa2614fa159053b75909594d28d262395f72d89d\/dataset_infos.json\r\nFound metadata file for dataset https:\/\/raw.githubusercontent.com\/huggingface\/nlp\/0.4.0\/datasets\/wikitext\/wikitext.py at \/home\/zeyuy\/.cache\/huggingface\/modules\/nlp_modules\/datasets\/wikitext\/5de6e79516446f747fcccc09aa2614fa159053b75909594d28d262395f72d89d\/wikitext.json\r\nLoading Dataset Infos from \/home\/zeyuy\/.cache\/huggingface\/modules\/nlp_modules\/datasets\/wikitext\/5de6e79516446f747fcccc09aa2614fa159053b75909594d28d262395f72d89d\r\nOverwrite dataset info from restored data version.\r\nLoading Dataset info from \/home\/zeyuy\/.cache\/huggingface\/datasets\/wikitext\/wikitext-2-v1\/1.0.0\/5de6e79516446f747fcccc09aa2614fa159053b75909594d28d262395f72d89d\r\nReusing dataset wikitext (\/home\/zeyuy\/.cache\/huggingface\/datasets\/wikitext\/wikitext-2-v1\/1.0.0\/5de6e79516446f747fcccc09aa2614fa159053b75909594d28d262395f72d89d)\r\nConstructing Dataset for split train, from \/home\/zeyuy\/.cache\/huggingface\/datasets\/wikitext\/wikitext-2-v1\/1.0.0\/5de6e79516446f747fcccc09aa2614fa159053b75909594d28d262395f72d89d\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/load.py\", line 600, in load_dataset\r\n ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications)\r\n File \"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/builder.py\", line 611, in as_dataset\r\n datasets = utils.map_nested(\r\n File \"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/utils\/py_utils.py\", line 216, in map_nested\r\n return function(data_struct)\r\n File \"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/builder.py\", line 631, in _build_single_dataset\r\n ds = self._as_dataset(\r\n File \"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/builder.py\", line 704, in _as_dataset\r\n return Dataset(**dataset_kwargs)\r\n File \"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/arrow_dataset.py\", line 188, in __init__\r\n self._fingerprint = generate_fingerprint(self)\r\n File \"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/fingerprint.py\", line 91, in generate_fingerprint\r\n hasher.update(key)\r\n File \"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/fingerprint.py\", line 57, in update\r\n self.m.update(self.hash(value).encode(\"utf-8\"))\r\n File \"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/fingerprint.py\", line 53, in hash\r\n return cls.hash_default(value)\r\n File \"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/fingerprint.py\", line 46, in hash_default\r\n return cls.hash_bytes(dumps(value))\r\n File 
\"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/utils\/py_utils.py\", line 361, in dumps\r\n with _no_cache_fields(obj):\r\n File \"\/home\/zeyuy\/miniconda3\/lib\/python3.8\/contextlib.py\", line 113, in __enter__\r\n return next(self.gen)\r\n File \"\/home\/zeyuy\/transformers\/examples\/language-modeling\/nlp\/src\/nlp\/utils\/py_utils.py\", line 348, in _no_cache_fields\r\n if isinstance(obj, tr.PreTrainedTokenizerBase) and hasattr(obj, \"cache\") and isinstance(obj.cache, dict):\r\nAttributeError: module 'transformers' has no attribute 'PreTrainedTokenizerBase'\r\n\r\n```\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/598\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/598\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/597","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/597\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/597\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/597\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/597","id":697112029,"node_id":"MDU6SXNzdWU2OTcxMTIwMjk=","number":597,"title":"Indices incorrect with multiprocessing","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{
"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2020-09-09T19:50:56Z","updated_at":"2020-09-10T11:03:37Z","closed_at":"2020-09-10T11:03:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When `num_proc` > 1, the indices argument passed to the map function is incorrect:\r\n\r\n```python\r\nd = load_dataset('imdb', split='test[:1%]')\r\n\r\ndef fn(x, inds):\r\n print(inds)\r\n return x\r\n\r\nd.select(range(10)).map(fn, with_indices=True, batched=True)\r\n# [0, 1]\r\n# [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\r\n\r\nd.select(range(10)).map(fn, with_indices=True, batched=True, num_proc=2)\r\n# [0, 1]\r\n# [0, 1]\r\n# [0, 1, 2, 3, 4]\r\n# [0, 1, 2, 3, 4]\r\n```\r\n\r\nAs you can see, the subset passed to each thread is indexed from 0 to N which doesn't reflect their positions in `d`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/597\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/597\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/596","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/596\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/596\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/596\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/596","id":696928139,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgyOTM5MTgw","number":596,"title":"[style\/quality] Moving to isort 5.0.0 + style\/quality on datasets and 
metrics","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-09T15:47:21Z","updated_at":"2020-09-10T10:05:04Z","closed_at":"2020-09-10T10:05:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/596","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/596","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/596.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/596.patch","merged_at":"2020-09-10T10:05:03Z"},"body":"Move the repo to isort 5.0.0.\r\n\r\nAlso start testing style\/quality on datasets and metrics.\r\n\r\nSpecific rule: we allow F401 (unused imports) in metrics to be able to add imports to detect early on missing dependencies.\r\nMaybe we could add this in datasets but while cleaning this I've seen many example of really unused imports in dataset so maybe it's better to have it as a line-by-line nova instead of a general rule like in metrics.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/596\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/596\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/595","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/595\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/595\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/595\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/595","id":696892304,"node_id":"MDU6SXNzdWU2OTY4OTIzMDQ=","number":595,"title":"`Dataset`\/`DatasetDict` has no attribute 
'save_to_disk'","user":{"login":"sudarshan85","id":488428,"node_id":"MDQ6VXNlcjQ4ODQyOA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/488428?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sudarshan85","html_url":"https:\/\/github.com\/sudarshan85","followers_url":"https:\/\/api.github.com\/users\/sudarshan85\/followers","following_url":"https:\/\/api.github.com\/users\/sudarshan85\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sudarshan85\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sudarshan85\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sudarshan85\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sudarshan85\/orgs","repos_url":"https:\/\/api.github.com\/users\/sudarshan85\/repos","events_url":"https:\/\/api.github.com\/users\/sudarshan85\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sudarshan85\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-09T15:01:52Z","updated_at":"2020-09-09T16:20:19Z","closed_at":"2020-09-09T16:20:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nAs the title indicates, both `Dataset` and `DatasetDict` classes don't seem to have the `save_to_disk` method. While the file [`arrow_dataset.py`](https:\/\/github.com\/huggingface\/nlp\/blob\/34bf0b03bfe03e7f77b8fec1cd48f5452c4fc7c1\/src\/nlp\/arrow_dataset.py) in the repo here has the method, the file `arrow_dataset.py` which is saved after `pip install nlp -U` in my `conda` environment DOES NOT contain the `save_to_disk` method. I even tried `pip install git+https:\/\/github.com\/huggingface\/nlp.git ` and still no luck. 
Do I need to install the library in another way?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/595\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/595\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/594","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/594\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/594\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/594\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/594","id":696816893,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgyODQ1OTc5","number":594,"title":"Fix germeval url","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-09T13:29:35Z","updated_at":"2020-09-09T13:34:35Z","closed_at":"2020-09-09T13:34:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/594","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/594","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/594.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/594.patch","merged_at":"2020-09-09T13:34:34Z"},"body":"Continuation of #593 but without the dummy data hack","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/594\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/594\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/593","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/593\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/593\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/593\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/593","id":696679182,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgyNzI5NTgw","number":593,"title":"GermEval 2014: 
new download urls","user":{"login":"stefan-it","id":20651387,"node_id":"MDQ6VXNlcjIwNjUxMzg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20651387?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stefan-it","html_url":"https:\/\/github.com\/stefan-it","followers_url":"https:\/\/api.github.com\/users\/stefan-it\/followers","following_url":"https:\/\/api.github.com\/users\/stefan-it\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stefan-it\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stefan-it\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stefan-it\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stefan-it\/orgs","repos_url":"https:\/\/api.github.com\/users\/stefan-it\/repos","events_url":"https:\/\/api.github.com\/users\/stefan-it\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stefan-it\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-09-09T10:07:29Z","updated_at":"2020-09-09T14:16:54Z","closed_at":"2020-09-09T13:35:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/593","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/593","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/593.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/593.patch","merged_at":null},"body":"Hi,\r\n\r\nunfortunately, the download links for the GermEval 2014 dataset have changed: they're now located on a Google Drive.\r\n\r\nI changed the URLs and bump version from 1.0.0 to 2.0.0.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/593\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/593\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/592","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/592\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/592\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/592\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/592","id":696619986,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgyNjc4MDkw","number":592,"title":"Test in memory and on 
disk","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-09T08:59:30Z","updated_at":"2020-09-09T13:50:04Z","closed_at":"2020-09-09T13:50:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/592","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/592","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/592.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/592.patch","merged_at":"2020-09-09T13:50:03Z"},"body":"I added test parameters to do every test both in memory and on disk.\r\nI also found a bug in concatenate_dataset thanks to the new tests and fixed it.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/592\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/592\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/591","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/591\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/591\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/591\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/591","id":696530413,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgyNjAxMzc1","number":591,"title":"fix #589 (backward 
compat)","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-09T07:33:13Z","updated_at":"2020-09-09T08:57:56Z","closed_at":"2020-09-09T08:57:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/591","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/591","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/591.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/591.patch","merged_at":"2020-09-09T08:57:54Z"},"body":"Fix #589","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/591\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/591\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/590","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/590\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/590\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/590\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/590","id":696501827,"node_id":"MDU6SXNzdWU2OTY1MDE4Mjc=","number":590,"title":"The process cannot access the file because it is being used by another process 
(windows)","user":{"login":"saareliad","id":22762845,"node_id":"MDQ6VXNlcjIyNzYyODQ1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22762845?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/saareliad","html_url":"https:\/\/github.com\/saareliad","followers_url":"https:\/\/api.github.com\/users\/saareliad\/followers","following_url":"https:\/\/api.github.com\/users\/saareliad\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/saareliad\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/saareliad\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/saareliad\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/saareliad\/orgs","repos_url":"https:\/\/api.github.com\/users\/saareliad\/repos","events_url":"https:\/\/api.github.com\/users\/saareliad\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/saareliad\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-09-09T07:01:36Z","updated_at":"2020-09-25T14:02:28Z","closed_at":"2020-09-25T14:02:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I consistently get the following error when developing in my PC (windows 10):\r\n\r\n```\r\n train_dataset = train_dataset.map(convert_to_features, batched=True)\r\n File \"C:\\Users\\saareliad\\AppData\\Local\\Continuum\\miniconda3\\envs\\py38\\lib\\site-packages\\nlp\\arrow_dataset.py\", line 970, in map\r\n shutil.move(tmp_file.name, cache_file_name)\r\n File \"C:\\Users\\saareliad\\AppData\\Local\\Continuum\\miniconda3\\envs\\py38\\lib\\shutil.py\", line 803, in move\r\n os.unlink(src)\r\nPermissionError: [WinError 32] The process cannot access the file because it is being used by another process: 'C:\\\\Users\\\\saareliad\\\\.cache\\\\huggingface\\\\datasets\\\\squad\\\\plain_text\\\\1.0.0\\\\408a8fa46a1e2805445b793f1022e743428ca739a34809fce872f0c7f17b44ab\\\\tmpsau1bep1'\r\n\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/590\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/590\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/589","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/589\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/589\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/589\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/589","id":696488447,"node_id":"MDU6SXNzdWU2OTY0ODg0NDc=","number":589,"title":"Cannot use nlp.load_dataset text, AttributeError: module 'nlp.utils' has no attribute 
'logging'","user":{"login":"ksjae","id":17930170,"node_id":"MDQ6VXNlcjE3OTMwMTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17930170?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ksjae","html_url":"https:\/\/github.com\/ksjae","followers_url":"https:\/\/api.github.com\/users\/ksjae\/followers","following_url":"https:\/\/api.github.com\/users\/ksjae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ksjae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ksjae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ksjae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ksjae\/orgs","repos_url":"https:\/\/api.github.com\/users\/ksjae\/repos","events_url":"https:\/\/api.github.com\/users\/ksjae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ksjae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-09T06:46:53Z","updated_at":"2020-09-09T08:57:54Z","closed_at":"2020-09-09T08:57:54Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 533, in load_dataset\r\n builder_cls = import_main_class(module_path, dataset=True)\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 61, in import_main_class\r\n module = importlib.import_module(module_path)\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.7\/importlib\/__init__.py\", line 127, in import_module\r\n return _bootstrap._gcd_import(name[level:], package, level)\r\n File \"\", line 1006, in _gcd_import\r\n File \"\", line 983, in _find_and_load\r\n File \"\", line 967, in _find_and_load_unlocked\r\n File \"\", line 677, in _load_unlocked\r\n File \"\", line 728, in exec_module\r\n File \"\", line 219, in _call_with_frames_removed\r\n File \"\/root\/anaconda3\/envs\/pytorch\/lib\/python3.7\/site-packages\/nlp\/datasets\/text\/5dc629379536c4037d9c2063e1caa829a1676cf795f8e030cd90a537eba20c08\/text.py\", line 9, in \r\n logger = nlp.utils.logging.get_logger(__name__)\r\nAttributeError: module 'nlp.utils' has no attribute 'logging'\r\n```\r\n\r\nOccurs on the following code, or any code including the load_dataset('text'):\r\n```\r\ndataset = load_dataset(\"text\", data_files=file_path, split=\"train\")\r\ndataset = dataset.map(lambda ex: tokenizer(ex[\"text\"], add_special_tokens=True,\r\n truncation=True, max_length=args.block_size), batched=True)\r\ndataset.set_format(type='torch', columns=['input_ids'])\r\nreturn dataset\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/589\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/589\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/588","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/588\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/588\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/588\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/588","id":695249809,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgxNTE5NzQx","number":588,"title":"Support pathlike obj in load dataset ","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-07T16:13:21Z","updated_at":"2020-09-08T07:45:19Z","closed_at":"2020-09-08T07:45:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/588","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/588","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/588.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/588.patch","merged_at":"2020-09-08T07:45:17Z"},"body":"Fix #582 \r\n\r\n(I recreated the PR, I got an issue with git)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/588\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/588\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/587","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/587\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/587\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/587\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/587","id":695246018,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgxNTE2Mzkx","number":587,"title":"Support pathlike obj in load 
dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-07T16:09:16Z","updated_at":"2020-09-07T16:10:35Z","closed_at":"2020-09-07T16:10:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/587","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/587","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/587.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/587.patch","merged_at":null},"body":"Fix #582 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/587\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/587\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/586","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/586\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/586\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/586\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/586","id":695237999,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgxNTA5MzU1","number":586,"title":"Better message when data files is 
empty","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-07T15:59:57Z","updated_at":"2020-09-09T09:00:09Z","closed_at":"2020-09-09T09:00:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/586","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/586","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/586.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/586.patch","merged_at":"2020-09-09T09:00:07Z"},"body":"Fix #581 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/586\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/586\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/585","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/585\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/585\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/585\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/585","id":695191209,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgxNDY4NTM4","number":585,"title":"Fix select for pyarrow < 
1.0.0","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-07T15:02:52Z","updated_at":"2020-09-08T07:43:17Z","closed_at":"2020-09-08T07:43:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/585","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/585","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/585.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/585.patch","merged_at":"2020-09-08T07:43:15Z"},"body":"Fix #583 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/585\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/585\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/584","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/584\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/584\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/584\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/584","id":695186652,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgxNDY0NjEz","number":584,"title":"Use github 
versioning","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-07T14:58:15Z","updated_at":"2020-09-09T13:37:35Z","closed_at":"2020-09-09T13:37:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/584","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/584","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/584.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/584.patch","merged_at":"2020-09-09T13:37:34Z"},"body":"Right now dataset scripts and metrics are downloaded from S3 which is in sync with master. It means that it's not currently possible to pin the dataset\/metric script version.\r\n\r\nTo fix that I changed the download url from S3 to github, and adding a `version` parameter in `load_dataset` and `load_metric` to pin a certain version of the lib, as in #562 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/584\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/584\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/583","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/583\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/583\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/583\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/583","id":695166265,"node_id":"MDU6SXNzdWU2OTUxNjYyNjU=","number":583,"title":"ArrowIndexError on 
Dataset.select","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-07T14:36:29Z","updated_at":"2020-09-08T07:43:15Z","closed_at":"2020-09-08T07:43:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"If the indices table consists in several chunks, then `dataset.select` results in an `ArrowIndexError` error for pyarrow < 1.0.0\r\n\r\nExample:\r\n\r\n```python\r\nfrom nlp import load_dataset\r\n\r\nmnli = load_dataset(\"glue\", \"mnli\", split=\"train\")\r\nshuffled = mnli.shuffle(seed=42)\r\nmnli.select(list(range(len(mnli))))\r\n```\r\n\r\nraises:\r\n```python\r\n---------------------------------------------------------------------------\r\nArrowIndexError Traceback (most recent call last)\r\n in \r\n----> 1 mnli.shuffle(seed=42).select(list(range(len(mnli))))\r\n\r\n~\/Desktop\/hf\/nlp\/src\/nlp\/fingerprint.py in wrapper(*args, **kwargs)\r\n 161 # Call actual function\r\n 162 \r\n--> 163 out = func(self, *args, **kwargs)\r\n 164 \r\n 165 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/Desktop\/hf\/nlp\/src\/nlp\/arrow_dataset.py in select(self, indices, keep_in_memory, indices_cache_file_name, writer_batch_size, new_fingerprint)\r\n 1653 if self._indices is not None:\r\n 1654 if PYARROW_V0:\r\n-> 1655 indices_array = self._indices.column(0).chunk(0).take(indices_array)\r\n 1656 else:\r\n 1657 indices_array = self._indices.column(0).take(indices_array)\r\n\r\n~\/.virtualenvs\/hf-datasets\/lib\/python3.7\/site-packages\/pyarrow\/array.pxi in pyarrow.lib.Array.take()\r\n\r\n~\/.virtualenvs\/hf-datasets\/lib\/python3.7\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowIndexError: take index out of bounds\r\n```\r\n\r\nThis is because the `take` method is only done on the first chunk which only contains 1000 elements by default (mnli has ~400 000 elements).\r\n\r\nShall we change that to use \r\n```python\r\npa.concat_tables(self._indices._indices.slice(i, 1) for i in indices_array)\r\n```\r\ninstead of `take` ? 
@thomwolf ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/583\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/583\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/582","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/582\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/582\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/582\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/582","id":695126456,"node_id":"MDU6SXNzdWU2OTUxMjY0NTY=","number":582,"title":"Allow for PathLike objects","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-07T13:54:51Z","updated_at":"2020-09-08T07:45:17Z","closed_at":"2020-09-08T07:45:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Using PathLike objects as input for `load_dataset` does not seem to work. 
The following will throw an error.\r\n\r\n```python\r\nfiles = list(Path(r\"D:\\corpora\\yourcorpus\").glob(\"*.txt\"))\r\ndataset = load_dataset(\"text\", data_files=files)\r\n```\r\n\r\nTraceback:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"C:\/dev\/python\/dutch-simplification\/main.py\", line 7, in \r\n dataset = load_dataset(\"text\", data_files=files)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\load.py\", line 548, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\builder.py\", line 470, in download_and_prepare\r\n self._save_info()\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\builder.py\", line 564, in _save_info\r\n self.info.write_to_directory(self._cache_dir)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\info.py\", line 149, in write_to_directory\r\n self._dump_info(f)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\info.py\", line 156, in _dump_info\r\n file.write(json.dumps(asdict(self)).encode(\"utf-8\"))\r\n File \"c:\\users\\bramv\\appdata\\local\\programs\\python\\python38\\lib\\json\\__init__.py\", line 231, in dumps\r\n return _default_encoder.encode(obj)\r\n File \"c:\\users\\bramv\\appdata\\local\\programs\\python\\python38\\lib\\json\\encoder.py\", line 199, in encode\r\n chunks = self.iterencode(o, _one_shot=True)\r\n File \"c:\\users\\bramv\\appdata\\local\\programs\\python\\python38\\lib\\json\\encoder.py\", line 257, in iterencode\r\n return _iterencode(o, 0)\r\nTypeError: keys must be str, int, float, bool or None, not WindowsPath\r\n```\r\n\r\nWe have to cast to a string explicitly to make this work. 
It would be nicer if we could actually use PathLike objects.\r\n\r\n```python\r\nfiles = [str(f) for f in Path(r\"D:\\corpora\\wablieft\").glob(\"*.txt\")]\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/582\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/582\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/581","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/581\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/581\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/581\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/581","id":695120517,"node_id":"MDU6SXNzdWU2OTUxMjA1MTc=","number":581,"title":"Better error message when input file does not exist","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-07T13:47:59Z","updated_at":"2020-09-09T09:00:07Z","closed_at":"2020-09-09T09:00:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In the following scenario, when `data_files` is an empty list, the stack trace and error message could be improved. 
This can probably be solved by checking for each file whether it actually exists and\/or whether the argument is not false-y.\r\n\r\n```python\r\ndataset = load_dataset(\"text\", data_files=[])\r\n```\r\n\r\nExample error trace.\r\n\r\n```\r\nUsing custom data configuration default\r\nDownloading and preparing dataset text\/default-d18f9b6611eb8e16 (download: Unknown size, generated: Unknown size, post-processed: Unknown sizetotal: Unknown size) to C:\\Users\\bramv\\.cache\\huggingface\\datasets\\text\\default-d18f9b6611eb8e16\\0.0.0\\3a79870d85f1982d6a2af884fde86a71c771747b4b161fd302d28ad22adf985b...\r\nTraceback (most recent call last):\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\builder.py\", line 424, in incomplete_dir\r\n yield tmp_dir\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\builder.py\", line 462, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\builder.py\", line 537, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\builder.py\", line 813, in _prepare_split\r\n num_examples, num_bytes = writer.finalize()\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\arrow_writer.py\", line 217, in finalize\r\n self.pa_writer.close()\r\nAttributeError: 'NoneType' object has no attribute 'close'\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"C:\/dev\/python\/dutch-simplification\/main.py\", line 7, in \r\n dataset = load_dataset(\"text\", data_files=files)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\load.py\", line 548, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\builder.py\", line 470, in download_and_prepare\r\n self._save_info()\r\n File \"c:\\users\\bramv\\appdata\\local\\programs\\python\\python38\\lib\\contextlib.py\", line 131, in __exit__\r\n self.gen.throw(type, value, traceback)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\dutch-simplification-nbNdqK9u\\lib\\site-packages\\nlp\\builder.py\", line 430, in incomplete_dir\r\n shutil.rmtree(tmp_dir)\r\n File \"c:\\users\\bramv\\appdata\\local\\programs\\python\\python38\\lib\\shutil.py\", line 737, in rmtree\r\n return _rmtree_unsafe(path, onerror)\r\n File \"c:\\users\\bramv\\appdata\\local\\programs\\python\\python38\\lib\\shutil.py\", line 615, in _rmtree_unsafe\r\n onerror(os.unlink, fullname, sys.exc_info())\r\n File \"c:\\users\\bramv\\appdata\\local\\programs\\python\\python38\\lib\\shutil.py\", line 613, in _rmtree_unsafe\r\n os.unlink(fullname)\r\nPermissionError: [WinError 32] The process cannot access the file because it is being used by another process: 
'C:\\\\Users\\\\bramv\\\\.cache\\\\huggingface\\\\datasets\\\\text\\\\default-d18f9b6611eb8e16\\\\0.0.0\\\\3a79870d85f1982d6a2af884fde86a71c771747b4b161fd302d28ad22adf985b.incomplete\\\\text-train.arrow'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/581\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/581\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/580","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/580\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/580\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/580\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/580","id":694954551,"node_id":"MDU6SXNzdWU2OTQ5NTQ1NTE=","number":580,"title":"nlp re-creates already-there caches when using a script, but not within a shell","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-07T10:23:50Z","updated_at":"2020-09-07T15:19:09Z","closed_at":"2020-09-07T14:26:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`nlp` keeps creating new caches for the same file when launching `filter` from a script, and behaves correctly from within the shell.\r\n\r\nExample: try running\r\n\r\n```\r\nimport nlp\r\n\r\nhans_easy_data = nlp.load_dataset('hans', split=\"validation\").filter(lambda x: x['label'] == 0)\r\nhans_hard_data = nlp.load_dataset('hans', split=\"validation\").filter(lambda x: x['label'] == 1)\r\n```\r\n\r\ntwice. If launched from a `file.py` script, the cache will be re-created the second time. 
If launched as 3 shell\/`ipython` commands, `nlp` will correctly re-use the cache.\r\nAs observed with @lhoestq.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/580\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/580\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/579","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/579\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/579\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/579\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/579","id":694947599,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgxMjU1OTI5","number":579,"title":"Doc metrics","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-07T10:15:24Z","updated_at":"2020-09-10T13:06:11Z","closed_at":"2020-09-10T13:06:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/579","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/579","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/579.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/579.patch","merged_at":"2020-09-10T13:06:10Z"},"body":"Adding documentation on metrics loading\/using\/sharing","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/579\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/579\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/578","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/578\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/578\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/578\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/578","id":694849940,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgxMTczNDE0","number":578,"title":"Add CommonGen Dataset","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-07T08:17:17Z","updated_at":"2020-09-07T11:50:29Z","closed_at":"2020-09-07T11:49:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/578","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/578","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/578.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/578.patch","merged_at":"2020-09-07T11:49:07Z"},"body":"CC Authors:\r\n@yuchenlin @MichaelZhouwang","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/578\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/578\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/577","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/577\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/577\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/577\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/577","id":694607148,"node_id":"MDU6SXNzdWU2OTQ2MDcxNDg=","number":577,"title":"Some languages in wikipedia dataset are not 
loading","user":{"login":"gaguilar","id":5833357,"node_id":"MDQ6VXNlcjU4MzMzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5833357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gaguilar","html_url":"https:\/\/github.com\/gaguilar","followers_url":"https:\/\/api.github.com\/users\/gaguilar\/followers","following_url":"https:\/\/api.github.com\/users\/gaguilar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gaguilar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gaguilar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gaguilar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gaguilar\/orgs","repos_url":"https:\/\/api.github.com\/users\/gaguilar\/repos","events_url":"https:\/\/api.github.com\/users\/gaguilar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gaguilar\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":13,"created_at":"2020-09-07T01:16:29Z","updated_at":"2021-07-15T15:55:26Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI am working with the `wikipedia` dataset and I have a script that goes over 92 of the available languages in that dataset. So far I have detected that `ar`, `af`, `an` are not loading. Other languages like `fr` and `en` are working fine. Here's how I am loading them:\r\n\r\n```\r\nimport nlp\r\n\r\nlangs = ['ar'. 'af', 'an']\r\n\r\nfor lang in langs:\r\n data = nlp.load_dataset('wikipedia', f'20200501.{lang}', beam_runner='DirectRunner', split='train') \r\n print(lang, len(data))\r\n```\r\n\r\nHere's what I see for 'ar' (it gets stuck there):\r\n```\r\nDownloading and preparing dataset wikipedia\/20200501.ar (download: Unknown size, generated: Unknown size, post-processed: Unknown sizetotal: Unknown size) to \/home\/gaguilar\/.cache\/huggingface\/datasets\/wikipedia\/20200501.ar\/1.0.0\/7be7f4324255faf70687be8692de57cf79197afdc33ff08d6a04ed602df32d50...\r\n```\r\n\r\nNote that those languages are indeed in the list of expected languages. Any suggestions on how to work around this? 
Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/577\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/577\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/576","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/576\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/576\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/576\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/576","id":694348645,"node_id":"MDExOlB1bGxSZXF1ZXN0NDgwNzM3NDQ1","number":576,"title":"Fix the code block in doc","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-06T11:40:55Z","updated_at":"2020-09-07T07:37:32Z","closed_at":"2020-09-07T07:37:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/576","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/576","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/576.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/576.patch","merged_at":"2020-09-07T07:37:18Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/576\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/576\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/575","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/575\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/575\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/575\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/575","id":693691611,"node_id":"MDU6SXNzdWU2OTM2OTE2MTE=","number":575,"title":"Couldn't reach certain URLs and for the ones that can be reached, code just blocks 
after downloading.","user":{"login":"sudarshan85","id":488428,"node_id":"MDQ6VXNlcjQ4ODQyOA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/488428?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sudarshan85","html_url":"https:\/\/github.com\/sudarshan85","followers_url":"https:\/\/api.github.com\/users\/sudarshan85\/followers","following_url":"https:\/\/api.github.com\/users\/sudarshan85\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sudarshan85\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sudarshan85\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sudarshan85\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sudarshan85\/orgs","repos_url":"https:\/\/api.github.com\/users\/sudarshan85\/repos","events_url":"https:\/\/api.github.com\/users\/sudarshan85\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sudarshan85\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-09-04T21:46:25Z","updated_at":"2020-09-22T10:41:36Z","closed_at":"2020-09-22T10:41:36Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI'm following the [quick tour](https:\/\/huggingface.co\/nlp\/quicktour.html) and tried to load the glue dataset:\r\n```\r\n>>> from nlp import load_dataset\r\n>>> dataset = load_dataset('glue', 'mrpc', split='train')\r\n```\r\n\r\nHowever, this ran into a `ConnectionError` saying it could not reach the URL (just pasting the last few lines):\r\n```\r\n\r\n\/net\/vaosl01\/opt\/NFS\/su0\/miniconda3\/envs\/hf\/lib\/python3.7\/site-packages\/nlp\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only)\r\n 354 \" to False.\"\r\n 355 )\r\n--> 356 raise ConnectionError(\"Couldn't reach {}\".format(url))\r\n 357 \r\n 358 # From now on, connected is True.\r\n\r\nConnectionError: Couldn't reach https:\/\/firebasestorage.googleapis.com\/v0\/b\/mtl-sentence-representations.appspot.com\/o\/data%2Fmrpc_dev_ids.tsv?alt=media&token=ec5c0836-31d5-48f4-b431-7480817f1adc\r\n```\r\n\r\nI tried glue with cola and sst2. I got the same error, just instead of mrpc in the URL, it was replaced with cola and sst2.\r\n\r\nSince this was not working, I thought I'll try another dataset. 
So I tried downloading the imdb dataset:\r\n```\r\nds = load_dataset('imdb', split='train')\r\n```\r\nThis downloads the data, but it just blocks after that:\r\n```\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 4.56k\/4.56k [00:00<00:00, 1.38MB\/s]\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2.07k\/2.07k [00:00<00:00, 1.15MB\/s]\r\nDownloading and preparing dataset imdb\/plain_text (download: 80.23 MiB, generated: 127.06 MiB, post-processed: Unknown sizetotal: 207.28 MiB) to \/net\/vaosl01\/opt\/NFS\/su0\/huggingface\/datasets\/imdb\/plain_text\/1.0.0\/76cdbd7249ea3548c928bbf304258dab44d09cd3638d9da8d42480d1d1be3743...\r\nDownloading: 
100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 84.1M\/84.1M [00:07<00:00, 11.1MB\/s]\r\n```\r\n\r\nI checked the folder `$HF_HOME\/datasets\/downloads\/extracted\/\/aclImdb`. This folder is constantly growing in size. When I navigated to the train folder within, there was no file. However, the test folder seemed to be populating. The last time I checked it was 327M. I thought the Imdb dataset was smaller than that. My questions are:\r\n1. Why is it still blocking? Is it still downloading?\r\n2. I specified split as train, so why is the test folder being populated?\r\n3. I read somewhere that after downloading, `nlp` converts the text files into some sort of `arrow` files, which will also take a while. Is this also happening here?\r\n\r\nThanks.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/575\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/575\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/574","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/574\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/574\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/574\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/574","id":693364853,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc5ODU5NzQy","number":574,"title":"Add modules 
cache","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-04T16:30:03Z","updated_at":"2020-09-22T10:27:08Z","closed_at":"2020-09-07T09:01:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/574","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/574","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/574.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/574.patch","merged_at":"2020-09-07T09:01:35Z"},"body":"As discusses in #554 , we should use a module cache directory outside of the python packages directory since we may not have write permissions.\r\n\r\nI added a new HF_MODULES_PATH directory that is added to the python path when doing `import nlp`.\r\nIn this directory, a module `nlp_modules` is created so that datasets can be added to `nlp_modules.datasets` and metrics to `nlp_modules.metrics`. 
`nlp_modules` doesn't exist on Pypi.\r\n\r\nIf someone using cloudpickle still wants to have the downloaded dataset\/metrics scripts to be inside the nlp directory, it is still possible to change the environment variable HF_MODULES_CACHE to be a path inside the nlp lib.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/574\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/574\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/573","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/573\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/573\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/573\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/573","id":693091790,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc5NjE4Mzc2","number":573,"title":"Faster caching for text dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-04T11:58:34Z","updated_at":"2020-09-04T12:53:24Z","closed_at":"2020-09-04T12:53:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/573","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/573","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/573.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/573.patch","merged_at":"2020-09-04T12:53:23Z"},"body":"As mentioned in #546 and #548 , hashing `data_files` contents to get the cache directory name for a text dataset can take a long time.\r\n\r\nTo make it faster I changed the hashing so that it takes into account the `path` and the `last modified timestamp` of each data file, instead of iterating through the content of each file to get a hash.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/573\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/573\/timeline","performed_via_github_app":null} 
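An illustrative sketch (not the library's actual code) of the faster-hashing idea described in #573 above: derive the cache key for a text dataset from each data file's path and last-modified timestamp instead of reading the file contents. The function name is hypothetical.

```python
# Illustrative sketch of hashing data files by path + mtime (see #573 above);
# not the library's actual implementation.
import hashlib
import os

def quick_data_files_hash(data_files):
    h = hashlib.sha256()
    for path in sorted(data_files):
        # Only the path and modification time feed the hash, so large files
        # are never read; editing a file changes its mtime and thus the key.
        h.update(path.encode("utf-8"))
        h.update(str(os.path.getmtime(path)).encode("utf-8"))
    return h.hexdigest()
```

Two runs over unmodified files would then map to the same cache directory name, while touching any file would produce a new one.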
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/572","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/572\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/572\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/572\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/572","id":692598231,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc5MTgyNDU3","number":572,"title":"Add CLUE Benchmark (11 datasets)","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-09-04T01:57:40Z","updated_at":"2020-09-07T09:59:11Z","closed_at":"2020-09-07T09:59:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/572","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/572","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/572.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/572.patch","merged_at":"2020-09-07T09:59:10Z"},"body":"Add 11 tasks of [CLUE](https:\/\/github.com\/CLUEbenchmark\/CLUE).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/572\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/572\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/571","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/571\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/571\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/571\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/571","id":692109287,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc4NzQ2MjMz","number":571,"title":"Serialization","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-09-03T16:21:38Z","updated_at":"2020-09-07T07:46:08Z","closed_at":"2020-09-07T07:46:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/571","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/571","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/571.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/571.patch","merged_at":"2020-09-07T07:46:07Z"},"body":"I added `save` and `load` method to serialize\/deserialize a dataset object in a folder.\r\nIt moves the arrow files there (or write them if the tables were in memory), and saves the pickle state in a json file `state.json`, except the info that are in a separate file `dataset_info.json`.\r\n\r\nExample:\r\n\r\n```python\r\nimport nlp\r\n\r\nsquad = nlp.load_dataset(\"squad\", split=\"train\")\r\nsquad.save(\"tmp\/squad\")\r\nsquad = nlp.Dataset.load(\"tmp\/squad\")\r\n```\r\n\r\n`ls tmp\/squad`\r\n```\r\ndataset_info.json squad-train.arrow state.json\r\n```\r\n\r\n`cat tmp\/squad\/state.json`\r\n```json\r\n{\r\n \"_data\": null,\r\n \"_data_files\": [\r\n {\r\n \"filename\": \"squad-train.arrow\",\r\n \"skip\": 0,\r\n \"take\": 87599\r\n }\r\n ],\r\n \"_fingerprint\": \"61f452797a686bc1\",\r\n \"_format_columns\": null,\r\n \"_format_kwargs\": {},\r\n \"_format_type\": null,\r\n \"_indexes\": {},\r\n \"_indices\": null,\r\n \"_indices_data_files\": [],\r\n \"_inplace_history\": [\r\n {\r\n \"transforms\": []\r\n }\r\n ],\r\n \"_output_all_columns\": false,\r\n \"_split\": \"train\"\r\n}\r\n```\r\n\r\n`cat tmp\/squad\/dataset_info.json`\r\n```json\r\n{\r\n \"builder_name\": \"squad\",\r\n \"citation\": \"@article{2016arXiv160605250R,\\n author = {{Rajpurkar}, Pranav and {Zhang}, Jian and {Lopyrev},\\n Konstantin and {Liang}, 
Percy},\\n title = \\\"{SQuAD: 100,000+ Questions for Machine Comprehension of Text}\\\",\\n journal = {arXiv e-prints},\\n year = 2016,\\n eid = {arXiv:1606.05250},\\n pages = {arXiv:1606.05250},\\narchivePrefix = {arXiv},\\n eprint = {1606.05250},\\n}\\n\",\r\n \"config_name\": \"plain_text\",\r\n \"dataset_size\": 89789763,\r\n \"description\": \"Stanford Question Answering Dataset (SQuAD) is a reading comprehension dataset, consisting of questions posed by crowdworkers on a set of Wikipedia articles, where the answer to every question is a segment of text, or span, from the corresponding reading passage, or the question might be unanswerable.\\n\",\r\n \"download_checksums\": {\r\n \"https:\/\/rajpurkar.github.io\/SQuAD-explorer\/dataset\/dev-v1.1.json\": {\r\n \"checksum\": \"95aa6a52d5d6a735563366753ca50492a658031da74f301ac5238b03966972c9\",\r\n \"num_bytes\": 4854279\r\n },\r\n \"https:\/\/rajpurkar.github.io\/SQuAD-explorer\/dataset\/train-v1.1.json\": {\r\n \"checksum\": \"3527663986b8295af4f7fcdff1ba1ff3f72d07d61a20f487cb238a6ef92fd955\",\r\n \"num_bytes\": 30288272\r\n }\r\n },\r\n \"download_size\": 35142551,\r\n \"features\": {\r\n \"answers\": {\r\n \"_type\": \"Sequence\",\r\n \"feature\": {\r\n \"answer_start\": {\r\n \"_type\": \"Value\",\r\n \"dtype\": \"int32\",\r\n \"id\": null\r\n },\r\n \"text\": {\r\n \"_type\": \"Value\",\r\n \"dtype\": \"string\",\r\n \"id\": null\r\n }\r\n },\r\n \"id\": null,\r\n \"length\": -1\r\n },\r\n \"context\": {\r\n \"_type\": \"Value\",\r\n \"dtype\": \"string\",\r\n \"id\": null\r\n },\r\n \"id\": {\r\n \"_type\": \"Value\",\r\n \"dtype\": \"string\",\r\n \"id\": null\r\n },\r\n \"question\": {\r\n \"_type\": \"Value\",\r\n \"dtype\": \"string\",\r\n \"id\": null\r\n },\r\n \"title\": {\r\n \"_type\": \"Value\",\r\n \"dtype\": \"string\",\r\n \"id\": null\r\n }\r\n },\r\n \"homepage\": \"https:\/\/rajpurkar.github.io\/SQuAD-explorer\/\",\r\n \"license\": \"\",\r\n \"post_processed\": {\r\n \"features\": null,\r\n \"resources_checksums\": {\r\n \"train\": {},\r\n \"train[:10%]\": {}\r\n }\r\n },\r\n \"post_processing_size\": 0,\r\n \"size_in_bytes\": 124932314,\r\n \"splits\": {\r\n \"train\": {\r\n \"dataset_name\": \"squad\",\r\n \"name\": \"train\",\r\n \"num_bytes\": 79317110,\r\n \"num_examples\": 87599\r\n },\r\n \"validation\": {\r\n \"dataset_name\": \"squad\",\r\n \"name\": \"validation\",\r\n \"num_bytes\": 10472653,\r\n \"num_examples\": 10570\r\n }\r\n },\r\n \"supervised_keys\": null,\r\n \"version\": {\r\n \"description\": \"New split API (https:\/\/tensorflow.org\/datasets\/splits)\",\r\n \"major\": 1,\r\n \"minor\": 0,\r\n \"nlp_version_to_prepare\": null,\r\n \"patch\": 0,\r\n \"version_str\": \"1.0.0\"\r\n }\r\n}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/571\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/571\/timeline","performed_via_github_app":null} 
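A small usage sketch for the save/load layout shown in #571 above: the key names and values come from the `state.json` and `dataset_info.json` excerpts in the PR description, and the `tmp/squad` path matches its example.

```python
# Sketch: inspect the folder written by `squad.save("tmp/squad")` in #571 above.
# Key names are taken from the state.json / dataset_info.json excerpts shown there.
import json
from pathlib import Path

saved = Path("tmp/squad")
state = json.loads((saved / "state.json").read_text())
info = json.loads((saved / "dataset_info.json").read_text())

print(state["_fingerprint"])                    # "61f452797a686bc1" in the example
print([f["filename"] for f in state["_data_files"]])  # ["squad-train.arrow"]
print(info["splits"]["train"]["num_examples"])  # 87599
```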
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/570","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/570\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/570\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/570\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/570","id":691846397,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc4NTI3OTQz","number":570,"title":"add reuters21578 dataset","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-03T10:25:47Z","updated_at":"2020-09-03T10:46:52Z","closed_at":"2020-09-03T10:46:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/570","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/570","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/570.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/570.patch","merged_at":"2020-09-03T10:46:51Z"},"body":"Reopen a PR this the merge.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/570\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/570\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/569","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/569\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/569\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/569\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/569","id":691832720,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc4NTE2Mzc2","number":569,"title":"Revert \"add reuters21578 
dataset\"","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-03T10:06:16Z","updated_at":"2020-09-03T10:07:13Z","closed_at":"2020-09-03T10:07:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/569","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/569","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/569.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/569.patch","merged_at":"2020-09-03T10:07:12Z"},"body":"Reverts huggingface\/nlp#471","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/569\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/569\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/568","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/568\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/568\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/568\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/568","id":691638656,"node_id":"MDU6SXNzdWU2OTE2Mzg2NTY=","number":568,"title":"`metric.compute` throws `ArrowInvalid` 
error","user":{"login":"ibeltagy","id":2287797,"node_id":"MDQ6VXNlcjIyODc3OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2287797?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ibeltagy","html_url":"https:\/\/github.com\/ibeltagy","followers_url":"https:\/\/api.github.com\/users\/ibeltagy\/followers","following_url":"https:\/\/api.github.com\/users\/ibeltagy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ibeltagy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ibeltagy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ibeltagy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ibeltagy\/orgs","repos_url":"https:\/\/api.github.com\/users\/ibeltagy\/repos","events_url":"https:\/\/api.github.com\/users\/ibeltagy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ibeltagy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-09-03T04:56:57Z","updated_at":"2020-10-05T16:33:53Z","closed_at":"2020-10-05T16:33:53Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I get the following error with `rouge.compute`. It happens only with distributed training, and it occurs randomly I can't easily reproduce it. This is using `nlp==0.4.0`\r\n\r\n```\r\n File \"\/home\/beltagy\/trainer.py\", line 92, in validation_step\r\n rouge_scores = rouge.compute(predictions=generated_str, references=gold_str, rouge_types=['rouge2', 'rouge1', 'rougeL'])\r\n File \"\/home\/beltagy\/miniconda3\/envs\/allennlp\/lib\/python3.7\/site-packages\/nlp\/metric.py\", line 224, in compute\r\n self.finalize(timeout=timeout)\r\n File \"\/home\/beltagy\/miniconda3\/envs\/allennlp\/lib\/python3.7\/site-packages\/nlp\/metric.py\", line 213, in finalize\r\n self.data = Dataset(**reader.read_files(node_files))\r\n File \"\/home\/beltagy\/miniconda3\/envs\/allennlp\/lib\/python3.7\/site-packages\/nlp\/arrow_reader.py\", line 217, in read_files\r\n dataset_kwargs = self._read_files(files=files, info=self._info, original_instructions=original_instructions)\r\n File \"\/home\/beltagy\/miniconda3\/envs\/allennlp\/lib\/python3.7\/site-packages\/nlp\/arrow_reader.py\", line 162, in _read_files\r\n pa_table: pa.Table = self._get_dataset_from_filename(f_dict)\r\n File \"\/home\/beltagy\/miniconda3\/envs\/allennlp\/lib\/python3.7\/site-packages\/nlp\/arrow_reader.py\", line 276, in _get_dataset_from_filename\r\n f = pa.ipc.open_stream(mmap)\r\n File \"\/home\/beltagy\/miniconda3\/envs\/allennlp\/lib\/python3.7\/site-packages\/pyarrow\/ipc.py\", line 173, in open_stream\r\n return RecordBatchStreamReader(source)\r\n File \"\/home\/beltagy\/miniconda3\/envs\/allennlp\/lib\/python3.7\/site-packages\/pyarrow\/ipc.py\", line 64, in __init__\r\n self._open(source)\r\n File \"pyarrow\/ipc.pxi\", line 469, in pyarrow.lib._RecordBatchStreamReader._open\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Tried reading schema message, was null or length 
0\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/568\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/568\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/567","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/567\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/567\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/567\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/567","id":691430245,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc4MTc2Njgx","number":567,"title":"Fix BLEURT metrics for backward compatibility","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-02T21:22:35Z","updated_at":"2020-09-03T07:29:52Z","closed_at":"2020-09-03T07:29:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/567","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/567","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/567.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/567.patch","merged_at":"2020-09-03T07:29:50Z"},"body":"Fix #565","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/567\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/567\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/566","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/566\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/566\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/566\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/566","id":691160208,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3OTM2NTIz","number":566,"title":"Remove logger pickling to fix gg colab 
issues","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-02T16:16:21Z","updated_at":"2020-09-03T16:31:53Z","closed_at":"2020-09-03T16:31:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/566","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/566","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/566.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/566.patch","merged_at":"2020-09-03T16:31:52Z"},"body":"A `logger` objects are not picklable in google colab, contrary to `logger` objects in jupyter notebooks or in python shells.\r\nIt creates some issues in google colab right now.\r\n\r\nIndeed by calling any `Dataset` method, the fingerprint update pickles the transform function, and as the logger comes with it, it results in an error (full stacktrace [here](http:\/\/pastebin.fr\/64330)):\r\n\r\n```python\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/zmq\/backend\/cython\/socket.cpython-36m-x86_64-linux-gnu.so in zmq.backend.cython.socket.Socket.__reduce_cython__()\r\n\r\nTypeError: no default __reduce__ due to non-trivial __cinit__\r\n```\r\n\r\nTo fix that I no longer dump the transform (`_map_single`, `select`, etc.), but the full name only (`nlp.arrow_dataset.Dataset._map_single`, `nlp.arrow_dataset.Dataset.select`, etc.)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/566\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/566\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/565","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/565\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/565\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/565\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/565","id":691039121,"node_id":"MDU6SXNzdWU2OTEwMzkxMjE=","number":565,"title":"No module named 
'nlp.logging'","user":{"login":"melody-ju","id":66633754,"node_id":"MDQ6VXNlcjY2NjMzNzU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/66633754?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/melody-ju","html_url":"https:\/\/github.com\/melody-ju","followers_url":"https:\/\/api.github.com\/users\/melody-ju\/followers","following_url":"https:\/\/api.github.com\/users\/melody-ju\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/melody-ju\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/melody-ju\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/melody-ju\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/melody-ju\/orgs","repos_url":"https:\/\/api.github.com\/users\/melody-ju\/repos","events_url":"https:\/\/api.github.com\/users\/melody-ju\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/melody-ju\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-02T13:49:50Z","updated_at":"2020-09-03T07:29:50Z","closed_at":"2020-09-03T07:29:50Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I am using nlp version 0.4.0. Trying to use bleurt as an eval metric, however, the bleurt script imports nlp.logging which creates the following error. What am I missing?\r\n\r\n```\r\n>>> import nlp\r\n2020-09-02 13:47:09.210310: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcudart.so.10.1\r\n>>> bleurt = nlp.load_metric(\"bleurt\")\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/melody\/anaconda3\/envs\/transformers\/lib\/python3.6\/site-packages\/nlp\/load.py\", line 443, in load_metric\r\n metric_cls = import_main_class(module_path, dataset=False)\r\n File \"\/home\/melody\/anaconda3\/envs\/transformers\/lib\/python3.6\/site-packages\/nlp\/load.py\", line 61, in import_main_class\r\n module = importlib.import_module(module_path)\r\n File \"\/home\/melody\/anaconda3\/envs\/transformers\/lib\/python3.6\/importlib\/__init__.py\", line 126, in import_module\r\n return _bootstrap._gcd_import(name[level:], package, level)\r\n File \"\", line 994, in _gcd_import\r\n File \"\", line 971, in _find_and_load\r\n File \"\", line 955, in _find_and_load_unlocked\r\n File \"\", line 665, in _load_unlocked\r\n File \"\", line 678, in exec_module\r\n File \"\", line 219, in _call_with_frames_removed\r\n File \"\/home\/melody\/anaconda3\/envs\/transformers\/lib\/python3.6\/site-packages\/nlp\/metrics\/bleurt\/43448cf2959ea81d3ae0e71c5c8ee31dc15eed9932f197f5f50673cbcecff2b5\/bleurt.py\", line 20, in \r\n from nlp.logging import get_logger\r\nModuleNotFoundError: No module named 'nlp.logging'\r\n```\r\n\r\nJust to show once again that I can't import the logging module:\r\n\r\n```\r\n>>> import nlp\r\n2020-09-02 13:48:38.190621: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcudart.so.10.1\r\n>>> nlp.__version__\r\n'0.4.0'\r\n>>> from nlp.logging import get_logger\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\nModuleNotFoundError: No module named 
'nlp.logging'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/565\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/565\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/564","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/564\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/564\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/564\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/564","id":691000020,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3ODAyMTk2","number":564,"title":"Wait for writing in distributed metrics","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-09-02T12:58:50Z","updated_at":"2020-09-09T09:13:23Z","closed_at":"2020-09-09T09:13:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/564","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/564","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/564.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/564.patch","merged_at":"2020-09-09T09:13:22Z"},"body":"There were CI bugs where a distributed metric would try to read all the files in process 0 while the other processes haven't started writing.\r\n\r\nTo fix that I added a custom locking mechanism that waits for the file to exist before trying to read it","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/564\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/564\/timeline","performed_via_github_app":null} 
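An illustrative sketch (not the actual code from #564 above) of a wait-for-file lock: the reading process polls until every other process's file exists before reading, instead of failing immediately. Timeout and poll interval are arbitrary values chosen for the example.

```python
# Illustrative wait-for-file polling lock in the spirit of #564 above;
# not the library's actual implementation.
import os
import time

def wait_for_files(paths, timeout=100.0, poll=0.05):
    """Block until every path in `paths` exists, or raise after `timeout` seconds."""
    deadline = time.time() + timeout
    missing = [p for p in paths if not os.path.exists(p)]
    while missing:
        if time.time() > deadline:
            raise TimeoutError(f"Files never appeared: {missing}")
        time.sleep(poll)
        missing = [p for p in paths if not os.path.exists(p)]
    return True
```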
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/563","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/563\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/563\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/563\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/563","id":690908674,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3NzI2MTEz","number":563,"title":"[Large datasets] Speed up download and processing","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-02T10:31:54Z","updated_at":"2020-09-09T09:03:33Z","closed_at":"2020-09-09T09:03:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/563","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/563","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/563.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/563.patch","merged_at":"2020-09-09T09:03:32Z"},"body":"Various improvements to speed-up creation and processing of large scale datasets.\r\n\r\nCurrently:\r\n- distributed downloads\r\n- remove etag from datafiles hashes to spare a request when restarting a failed download","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/563\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/563\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/562","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/562\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/562\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/562\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/562","id":690907604,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3NzI1MjMx","number":562,"title":"[Reproductibility] Allow to pin versions of 
datasets\/metrics","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-02T10:30:13Z","updated_at":"2020-09-09T13:04:54Z","closed_at":"2020-09-09T13:04:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/562","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/562","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/562.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/562.patch","merged_at":null},"body":"Repurpose the `version` attribute in datasets and metrics to let the user pin a specific version of datasets and metric scripts:\r\n```\r\ndataset = nlp.load_dataset('squad', version='1.0.0')\r\nmetric = nlp.load_metric('squad', version='1.0.0')\r\n```\r\n\r\nNotes:\r\n- version number are the release version of the library\r\n- currently only possible for canonical datasets\/metrics, ie. 
integrated in the GitHub repo of the library","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/562\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/562\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/561","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/561\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/561\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/561\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/561","id":690871415,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3Njk1NDQy","number":561,"title":"Made `share_dataset` more readable","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-02T09:34:48Z","updated_at":"2020-09-03T09:00:30Z","closed_at":"2020-09-03T09:00:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/561","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/561","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/561.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/561.patch","merged_at":"2020-09-03T09:00:29Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/561\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/561\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/560","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/560\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/560\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/560\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/560","id":690488764,"node_id":"MDU6SXNzdWU2OTA0ODg3NjQ=","number":560,"title":"Using custom 
DownloadConfig results in an error","user":{"login":"ynouri","id":1789921,"node_id":"MDQ6VXNlcjE3ODk5MjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1789921?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ynouri","html_url":"https:\/\/github.com\/ynouri","followers_url":"https:\/\/api.github.com\/users\/ynouri\/followers","following_url":"https:\/\/api.github.com\/users\/ynouri\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ynouri\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ynouri\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ynouri\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ynouri\/orgs","repos_url":"https:\/\/api.github.com\/users\/ynouri\/repos","events_url":"https:\/\/api.github.com\/users\/ynouri\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ynouri\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-09-01T22:23:02Z","updated_at":"2020-09-07T19:50:57Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"## Version \/ Environment\r\n\r\nUbuntu 18.04\r\nPython 3.6.8\r\nnlp 0.4.0\r\n\r\n## Description\r\n\r\nLoading `imdb` dataset works fine when when I don't specify any `download_config` argument. When I create a custom `DownloadConfig` object and pass it to the `nlp.load_dataset` function, this results in an error.\r\n\r\n## How to reproduce\r\n\r\n### Example without DownloadConfig --> works\r\n\r\n```python\r\nimport os\r\n\r\nos.environ[\"HF_HOME\"] = \"\/data\/hf-test-without-dl-config-01\/\"\r\n\r\nimport logging\r\nimport nlp\r\n\r\nlogging.basicConfig(level=logging.INFO)\r\n\r\nif __name__ == \"__main__\":\r\n imdb = nlp.load_dataset(path=\"imdb\")\r\n```\r\n\r\n### Example with DownloadConfig --> doesn't work\r\n\r\n```python\r\nimport os\r\n\r\nos.environ[\"HF_HOME\"] = \"\/data\/hf-test-with-dl-config-01\/\"\r\n\r\nimport logging\r\nimport nlp\r\nfrom nlp.utils import DownloadConfig\r\n\r\nlogging.basicConfig(level=logging.INFO)\r\n\r\nif __name__ == \"__main__\":\r\n download_config = DownloadConfig()\r\n imdb = nlp.load_dataset(path=\"imdb\", download_config=download_config)\r\n```\r\n\r\nError traceback:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\/...\/example_with_dl_config.py\", line 13, in \r\n imdb = nlp.load_dataset(path=\"imdb\", download_config=download_config)\r\n File \"\/...\/python3.6\/python3.6\/site-packages\/nlp\/load.py\", line 549, in load_dataset\r\n download_config=download_config, download_mode=download_mode, ignore_verifications=ignore_verifications,\r\n File \"\/...\/python3.6\/python3.6\/site-packages\/nlp\/builder.py\", line 463, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/...\/python3.6\/python3.6\/site-packages\/nlp\/builder.py\", line 518, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/...\/python3.6\/python3.6\/site-packages\/nlp\/datasets\/imdb\/76cdbd7249ea3548c928bbf304258dab44d09cd3638d9da8d42480d1d1be3743\/imdb.py\", line 86, in _split_generators\r\n arch_path = dl_manager.download_and_extract(_DOWNLOAD_URL)\r\n File \"\/...\/python3.6\/python3.6\/site-packages\/nlp\/utils\/download_manager.py\", line 220, in 
download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/...\/python3.6\/python3.6\/site-packages\/nlp\/utils\/download_manager.py\", line 158, in download\r\n self._record_sizes_checksums(url_or_urls, downloaded_path_or_paths)\r\n File \"\/...\/python3.6\/python3.6\/site-packages\/nlp\/utils\/download_manager.py\", line 108, in _record_sizes_checksums\r\n self._recorded_sizes_checksums[url] = get_size_checksum_dict(path)\r\n File \"\/...\/python3.6\/python3.6\/site-packages\/nlp\/utils\/info_utils.py\", line 79, in get_size_checksum_dict\r\n with open(path, \"rb\") as f:\r\nIsADirectoryError: [Errno 21] Is a directory: '\/data\/hf-test-with-dl-config-01\/datasets\/extracted\/b6802c5b61824b2c1f7dbf7cda6696b5f2e22214e18d171ce1ed3be90c931ce5'\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/560\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/560\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/559","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/559\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/559\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/559\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/559","id":690411263,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3MzAzOTM2","number":559,"title":"Adding the KILT knowledge source and tasks","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-09-01T20:05:13Z","updated_at":"2020-09-04T18:05:47Z","closed_at":"2020-09-04T18:05:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/559","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/559","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/559.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/559.patch","merged_at":"2020-09-04T18:05:47Z"},"body":"This adds Wikipedia pre-processed for KILT, as well as the task data. 
Only the question IDs are provided for TriviaQA, but they can easily be mapped back with:\r\n```\r\nimport nlp\r\n\r\nkilt_wikipedia = nlp.load_dataset('kilt_wikipedia')\r\n\r\nkilt_tasks = nlp.load_dataset('kilt_tasks')\r\ntriviaqa = nlp.load_dataset('trivia_qa', 'unfiltered.nocontext')\r\ntriviaqa_map = {}\r\nfor k in ['train', 'validation', 'test']:\r\n triviaqa_map = dict([(q_id, i) for i, q_id in enumerate(triviaqa[k]['question_id'])])\r\n kilt_tasks[k + '_triviaqa'] = kilt_tasks[k + '_triviaqa'].filter(lambda x: x['id'] in triviaqa_map)\r\n kilt_tasks[k + '_triviaqa'] = kilt_tasks[k + '_triviaqa'].map(lambda x: {'input': triviaqa[k][triviaqa_map[x['id']]]['question']})\r\n```\r\n\r\nIt would be great to have the dataset by Monday, which is when the paper should land on Arxiv and @fabiopetroni is planning on tweeting about the paper and `facebookresearch` repository for the dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/559\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/559\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/558","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/558\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/558\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/558\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/558","id":690318105,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3MjI2ODA0","number":558,"title":"Rerun pip install -e","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-01T17:24:39Z","updated_at":"2020-09-01T17:24:51Z","closed_at":"2020-09-01T17:24:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/558","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/558","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/558.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/558.patch","merged_at":"2020-09-01T17:24:50Z"},"body":"Hopefully it fixes the github 
actions","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/558\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/558\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/557","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/557\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/557\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/557\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/557","id":690220135,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3MTQ1NjAx","number":557,"title":"Fix a few typos","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-01T15:03:24Z","updated_at":"2020-09-02T07:39:08Z","closed_at":"2020-09-02T07:39:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/557","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/557","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/557.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/557.patch","merged_at":"2020-09-02T07:39:06Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/557\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/557\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/556","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/556\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/556\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/556\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/556","id":690218423,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3MTQ0MTky","number":556,"title":"Add 
DailyDialog","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-01T15:01:15Z","updated_at":"2020-09-03T15:42:03Z","closed_at":"2020-09-03T15:38:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/556","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/556","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/556.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/556.patch","merged_at":"2020-09-03T15:38:39Z"},"body":"http:\/\/yanran.li\/dailydialog.html\r\n\r\nhttps:\/\/arxiv.org\/pdf\/1710.03957.pdf\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/556\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/556\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/555","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/555\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/555\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/555\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/555","id":690197725,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3MTI2OTIy","number":555,"title":"Upgrade pip in benchmark github 
action","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-01T14:37:26Z","updated_at":"2020-09-01T15:26:16Z","closed_at":"2020-09-01T15:26:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/555","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/555","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/555.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/555.patch","merged_at":"2020-09-01T15:26:15Z"},"body":"It looks like it fixes the `import nlp` issue we have","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/555\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/555\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/554","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/554\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/554\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/554\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/554","id":690173214,"node_id":"MDU6SXNzdWU2OTAxNzMyMTQ=","number":554,"title":"nlp downloads to its module 
path","user":{"login":"danieldk","id":49398,"node_id":"MDQ6VXNlcjQ5Mzk4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49398?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danieldk","html_url":"https:\/\/github.com\/danieldk","followers_url":"https:\/\/api.github.com\/users\/danieldk\/followers","following_url":"https:\/\/api.github.com\/users\/danieldk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danieldk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danieldk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danieldk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danieldk\/orgs","repos_url":"https:\/\/api.github.com\/users\/danieldk\/repos","events_url":"https:\/\/api.github.com\/users\/danieldk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danieldk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-09-01T14:06:14Z","updated_at":"2020-09-11T06:19:24Z","closed_at":"2020-09-11T06:19:24Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to package `nlp` for Nix, because it is now an optional dependency for `transformers`. The problem that I encounter is that the `nlp` library downloads to the module path, which is typically not writable in most package management systems:\r\n\r\n```>>> import nlp\r\n>>> squad_dataset = nlp.load_dataset('squad')\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/nix\/store\/2yhik0hhqayksmkkfb0ylqp8cf5wa5wp-python3-3.8.5-env\/lib\/python3.8\/site-packages\/nlp\/load.py\", line 530, in load_dataset\r\n module_path, hash = prepare_module(path, download_config=download_config, dataset=True)\r\n File \"\/nix\/store\/2yhik0hhqayksmkkfb0ylqp8cf5wa5wp-python3-3.8.5-env\/lib\/python3.8\/site-packages\/nlp\/load.py\", line 329, in prepare_module\r\n os.makedirs(main_folder_path, exist_ok=True)\r\n File \"\/nix\/store\/685kq8pyhrvajah1hdsfn4q7gm3j4yd4-python3-3.8.5\/lib\/python3.8\/os.py\", line 223, in makedirs\r\n mkdir(name, mode)\r\nOSError: [Errno 30] Read-only file system: '\/nix\/store\/2yhik0hhqayksmkkfb0ylqp8cf5wa5wp-python3-3.8.5-env\/lib\/python3.8\/site-packages\/nlp\/datasets\/squad'\r\n```\r\n\r\nDo you have any suggested workaround for this issue?\r\n\r\nPerhaps overriding the default value for `force_local_path` of `prepare_module`?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/554\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/554\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/553","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/553\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/553\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/553\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/553","id":690143182,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3MDgxNTg2","number":553,"title":"[Fix GitHub Actions] test adding 
tmate","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-01T13:28:03Z","updated_at":"2021-05-05T18:24:38Z","closed_at":"2020-09-03T09:01:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/553","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/553","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/553.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/553.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/553\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/553\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/552","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/552\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/552\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/552\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/552","id":690079429,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc3MDI4MzMx","number":552,"title":"Add 
multiprocessing","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-09-01T11:56:17Z","updated_at":"2020-09-22T15:11:56Z","closed_at":"2020-09-02T10:01:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/552","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/552","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/552.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/552.patch","merged_at":"2020-09-02T10:01:25Z"},"body":"Adding multiprocessing to `.map`\r\n\r\nIt works in 3 steps:\r\n- shard the dataset in `num_proc` shards\r\n- spawn one process per shard and call `map` on them\r\n- concatenate the resulting datasets\r\n\r\nExample of usage:\r\n\r\n```python\r\nfrom nlp import load_dataset\r\n\r\ndataset = load_dataset(\"squad\", split=\"train\")\r\n\r\ndef function(x):\r\n return {\"lowered\": x.lower()}\r\n\r\nprocessed = d.map(\r\n function,\r\n input_columns=[\"context\"],\r\n num_proc=4,\r\n cache_file_name=\"playground\/tmp.arrow\",\r\n load_from_cache_file=False\r\n)\r\n```\r\n\r\nHere it writes 4 files depending on the process rank:\r\n- `playground\/tmp_00000_of_00004.arrow`\r\n- `playground\/tmp_00001_of_00004.arrow`\r\n- `playground\/tmp_00002_of_00004.arrow`\r\n- `playground\/tmp_00003_of_00004.arrow`\r\n\r\nThe suffix format can be specified by the user.\r\n\r\nIf the `cache_file_name` is not specified, it writes into separated files depending on the fingerprint, as usual.\r\n\r\nI still need to:\r\n- write tests for this\r\n- try to improve the logging (currently it shows 4 progress bars, but if one finishes before the others, then the following messages are written over the progress bars)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/552\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/552\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/551","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/551\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/551\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/551\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/551","id":690034762,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc2OTkwNjAw","number":551,"title":"added HANS dataset","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-09-01T10:42:02Z","updated_at":"2020-09-01T12:17:10Z","closed_at":"2020-09-01T12:17:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/551","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/551","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/551.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/551.patch","merged_at":"2020-09-01T12:17:10Z"},"body":"Adds the [HANS](https:\/\/github.com\/tommccoy1\/hans) dataset to evaluate NLI systems.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/551\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/551\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/550","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/550\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/550\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/550\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/550","id":689775914,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc2NzgyNDY1","number":550,"title":"[BUGFIX] Solving mismatched checksum issue for the LinCE dataset 
(#539)","user":{"login":"gaguilar","id":5833357,"node_id":"MDQ6VXNlcjU4MzMzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5833357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gaguilar","html_url":"https:\/\/github.com\/gaguilar","followers_url":"https:\/\/api.github.com\/users\/gaguilar\/followers","following_url":"https:\/\/api.github.com\/users\/gaguilar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gaguilar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gaguilar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gaguilar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gaguilar\/orgs","repos_url":"https:\/\/api.github.com\/users\/gaguilar\/repos","events_url":"https:\/\/api.github.com\/users\/gaguilar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gaguilar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-01T03:27:03Z","updated_at":"2020-09-03T09:06:01Z","closed_at":"2020-09-03T09:06:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/550","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/550","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/550.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/550.patch","merged_at":"2020-09-03T09:06:01Z"},"body":"Hi,\r\n\r\nI have added the updated `dataset_infos.json` file for the LinCE benchmark. This update is to fix the mismatched checksum bug #539 for one of the datasets in the LinCE benchmark. To update the file, I run this command from the nlp root directory:\r\n\r\n```\r\npython nlp-cli test .\/datasets\/lince --save_infos --all_configs\r\n```\r\n\r\n**NOTE**: I needed to change [this line](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/src\/nlp\/commands\/dummy_data.py#L8) from: `from .utils.logging import get_logger` to `from nlp.utils.logging import get_logger`, otherwise the script was not able to import `get_logger`. 
However, I did not include that in this PR since that could have been just my environment (and another PR could be fixing this already if it is actually an issue).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/550\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/550\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/549","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/549\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/549\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/549\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/549","id":689766465,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc2Nzc0OTI1","number":549,"title":"Fix bleurt logging import","user":{"login":"jbragg","id":2238344,"node_id":"MDQ6VXNlcjIyMzgzNDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2238344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jbragg","html_url":"https:\/\/github.com\/jbragg","followers_url":"https:\/\/api.github.com\/users\/jbragg\/followers","following_url":"https:\/\/api.github.com\/users\/jbragg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jbragg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jbragg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jbragg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jbragg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jbragg\/repos","events_url":"https:\/\/api.github.com\/users\/jbragg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jbragg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-09-01T03:01:25Z","updated_at":"2020-09-03T18:04:46Z","closed_at":"2020-09-03T09:04:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/549","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/549","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/549.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/549.patch","merged_at":null},"body":"Bleurt started throwing an error in some code we have.\r\nThis looks like the fix but...\r\n\r\nIt's also unnerving that even a prebuilt docker image with pinned versions can be working 1 day and then fail the next (especially for production systems).\r\n\r\nAny way for us to pin your metrics code so that they are guaranteed not to to change and possibly fail on repository changes?\r\n\r\nThanks (and also for your continued work on the lib...)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/549\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/549\/timeline","performed_via_github_app":null} 
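A side note on the import change mentioned in #550 (and the related logging-import fix in #549): a single-dot relative import resolves against the importing module's own subpackage, and relative imports only work when the file is loaded as part of its package rather than run directly as a script, which is why switching to the absolute form worked locally. A minimal illustration, assuming the layout implied by the linked file (`src/nlp/commands/dummy_data.py`) and that `get_logger` lives at `nlp.utils.logging` as in the comment above:

```python
# src/nlp/commands/dummy_data.py (illustrative)

# Resolves against this module's own subpackage, i.e. nlp.commands.utils.logging,
# which does not exist:
# from .utils.logging import get_logger

# Reaches the top-level utils package, but only when this file is imported as
# part of the `nlp` package (e.g. `python -m nlp.commands.dummy_data`):
# from ..utils.logging import get_logger

# Works both inside the package and when the file is executed as a plain script:
from nlp.utils.logging import get_logger

logger = get_logger(__name__)
```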
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/548","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/548\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/548\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/548\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/548","id":689285996,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc2MzYzMjU1","number":548,"title":"[Breaking] Switch text loading to multi-threaded PyArrow loading","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-08-31T15:15:41Z","updated_at":"2020-09-08T10:19:58Z","closed_at":"2020-09-08T10:19:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/548","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/548","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/548.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/548.patch","merged_at":"2020-09-08T10:19:57Z"},"body":"Test if we can get better performances for large-scale text datasets by using multi-threaded text file loading based on Apache Arrow multi-threaded CSV loader.\r\n\r\nIf it works ok, it would fix #546.\r\n\r\n**Breaking change**:\r\nThe text lines now do not include final line-breaks anymore.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/548\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/548\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/547","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/547\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/547\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/547\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/547","id":689268589,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc2MzQ4OTk5","number":547,"title":"[Distributed] Making loading distributed datasets a bit 
safer","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-31T14:51:34Z","updated_at":"2020-08-31T15:16:30Z","closed_at":"2020-08-31T15:16:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/547","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/547","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/547.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/547.patch","merged_at":"2020-08-31T15:16:29Z"},"body":"Add some file-locks during dataset loading","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/547\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/547\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/546","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/546\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/546\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/546\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/546","id":689186526,"node_id":"MDU6SXNzdWU2ODkxODY1MjY=","number":546,"title":"Very slow data loading on large 
dataset","user":{"login":"agemagician","id":6087313,"node_id":"MDQ6VXNlcjYwODczMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6087313?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/agemagician","html_url":"https:\/\/github.com\/agemagician","followers_url":"https:\/\/api.github.com\/users\/agemagician\/followers","following_url":"https:\/\/api.github.com\/users\/agemagician\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/agemagician\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/agemagician\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/agemagician\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/agemagician\/orgs","repos_url":"https:\/\/api.github.com\/users\/agemagician\/repos","events_url":"https:\/\/api.github.com\/users\/agemagician\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/agemagician\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":17,"created_at":"2020-08-31T12:57:23Z","updated_at":"2021-10-08T15:12:17Z","closed_at":"2020-09-08T10:19:57Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I made a simple python script to check the NLP library speed, which loads 1.1 TB of textual data.\r\nIt has been 8 hours and still, it is on the loading steps.\r\nIt does work when the text dataset size is small about 1 GB, but it doesn't scale.\r\nIt also uses a single thread during the data loading step.\r\n\r\n```\r\ntrain_files = glob.glob(\"xxx\/*.txt\",recursive=True)\r\nrandom.shuffle(train_files)\r\n\r\nprint(train_files)\r\n\r\ndataset = nlp.load_dataset('text', \r\n data_files=train_files,\r\n name=\"customDataset\",\r\n version=\"1.0.0\",\r\n cache_dir=\"xxx\/nlp\")\r\n```\r\n\r\nIs there something that I am missing ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/546\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/546\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/545","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/545\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/545\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/545\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/545","id":689138878,"node_id":"MDU6SXNzdWU2ODkxMzg4Nzg=","number":545,"title":"New release coming up for this 
library","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-31T11:37:38Z","updated_at":"2021-01-13T10:59:04Z","closed_at":"2021-01-13T10:59:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi all,\r\nA few words on the roadmap for this library.\r\n\r\nThe next release will be a big one and is planed at the end of this week.\r\n\r\nIn addition to the support for indexed datasets (useful for non-parametric models like REALM, RAG, DPR, knn-LM and many other fast dataset retrieval technics), it will:\r\n- have support for multi-modal datasets\r\n- include various significant improvements on speed for standard processing (map, shuffling, ...)\r\n- have a better support for metrics (better caching, and a robust API) and a bigger focus on reproductibility\r\n- change the name to the final name (voted by the community): `datasets`\r\n- be the 1.0.0 release as we think the API will be mostly stabilized from now on","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/545\/reactions","total_count":4,"+1":0,"-1":0,"laugh":0,"hooray":4,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/545\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/544","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/544\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/544\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/544\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/544","id":689062519,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc2MTc4MDM2","number":544,"title":"[Distributed] Fix load_dataset error when multiprocessing + add 
test","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-31T09:30:10Z","updated_at":"2020-08-31T11:15:11Z","closed_at":"2020-08-31T11:15:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/544","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/544","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/544.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/544.patch","merged_at":"2020-08-31T11:15:10Z"},"body":"Fix #543 + add test","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/544\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/544\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/543","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/543\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/543\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/543\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/543","id":688644407,"node_id":"MDU6SXNzdWU2ODg2NDQ0MDc=","number":543,"title":"nlp.load_dataset is not safe for multi processes when loading from local 
files","user":{"login":"luyug","id":55288513,"node_id":"MDQ6VXNlcjU1Mjg4NTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55288513?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/luyug","html_url":"https:\/\/github.com\/luyug","followers_url":"https:\/\/api.github.com\/users\/luyug\/followers","following_url":"https:\/\/api.github.com\/users\/luyug\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/luyug\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/luyug\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/luyug\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/luyug\/orgs","repos_url":"https:\/\/api.github.com\/users\/luyug\/repos","events_url":"https:\/\/api.github.com\/users\/luyug\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/luyug\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-30T03:20:34Z","updated_at":"2020-08-31T11:15:10Z","closed_at":"2020-08-31T11:15:10Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Loading from local files, e.g., `dataset = nlp.load_dataset('csv', data_files=['file_1.csv', 'file_2.csv'])`\r\nconcurrently from multiple processes, will raise `FileExistsError` from builder's line 430, https:\/\/github.com\/huggingface\/nlp\/blob\/6655008c738cb613c522deb3bd18e35a67b2a7e5\/src\/nlp\/builder.py#L423-L438\r\n\r\nLikely because multiple processes step into download_and_prepare, https:\/\/github.com\/huggingface\/nlp\/blob\/6655008c738cb613c522deb3bd18e35a67b2a7e5\/src\/nlp\/load.py#L550-L554\r\n\r\nThis can happen when launching distributed training with commands like `python -m torch.distributed.launch --nproc_per_node 4` on a new collection of files never loaded before.\r\n\r\nI can create a PR that puts in some file locks. 
It would be helpful if I can be informed of the convention for naming and placement of the lock.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/543\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/543\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/542","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/542\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/542\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/542\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/542","id":688555036,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc1NzkyNTY0","number":542,"title":"Add TensorFlow example","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-29T15:39:27Z","updated_at":"2020-08-31T09:49:20Z","closed_at":"2020-08-31T09:49:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/542","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/542","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/542.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/542.patch","merged_at":"2020-08-31T09:49:19Z"},"body":"Update the Quick Tour documentation in order to add the TensorFlow equivalent source code for the classification example. 
Now it is possible to select either the code in PyTorch or in TensorFlow in the Quick tour.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/542\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/542\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/541","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/541\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/541\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/541\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/541","id":688521224,"node_id":"MDU6SXNzdWU2ODg1MjEyMjQ=","number":541,"title":"Best practices for training tokenizers with nlp","user":{"login":"moskomule","id":11806234,"node_id":"MDQ6VXNlcjExODA2MjM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11806234?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/moskomule","html_url":"https:\/\/github.com\/moskomule","followers_url":"https:\/\/api.github.com\/users\/moskomule\/followers","following_url":"https:\/\/api.github.com\/users\/moskomule\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/moskomule\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/moskomule\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/moskomule\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/moskomule\/orgs","repos_url":"https:\/\/api.github.com\/users\/moskomule\/repos","events_url":"https:\/\/api.github.com\/users\/moskomule\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/moskomule\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-29T12:06:49Z","updated_at":"2020-08-29T12:07:00Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, thank you for developing this library. \r\n\r\nWhat do you think are the best practices for training tokenizers using `nlp`? 
In the document and examples, I could only find pre-trained tokenizers used.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/541\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/541\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/540","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/540\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/540\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/540\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/540","id":688475884,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc1NzMzNzMz","number":540,"title":"[BUGFIX] Fix Race Dataset Checksum bug","user":{"login":"abarbosa94","id":6608232,"node_id":"MDQ6VXNlcjY2MDgyMzI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6608232?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abarbosa94","html_url":"https:\/\/github.com\/abarbosa94","followers_url":"https:\/\/api.github.com\/users\/abarbosa94\/followers","following_url":"https:\/\/api.github.com\/users\/abarbosa94\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abarbosa94\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abarbosa94\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abarbosa94\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abarbosa94\/orgs","repos_url":"https:\/\/api.github.com\/users\/abarbosa94\/repos","events_url":"https:\/\/api.github.com\/users\/abarbosa94\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abarbosa94\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-08-29T07:00:10Z","updated_at":"2020-09-18T11:42:20Z","closed_at":"2020-09-18T11:42:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/540","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/540","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/540.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/540.patch","merged_at":"2020-09-18T11:42:20Z"},"body":"In #537 I noticed that there was a bug in checksum checking when I have tried to download the race dataset. The reason for this is that the current preprocessing was just considering the `high school` data and it was ignoring the `middle` one. 
This PR just fixes it :)\r\n\r\nMoreover, I have added some descriptions.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/540\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/540\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/539","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/539\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/539\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/539\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/539","id":688323602,"node_id":"MDU6SXNzdWU2ODgzMjM2MDI=","number":539,"title":"[Dataset] `NonMatchingChecksumError` due to an update in the LinCE benchmark data","user":{"login":"gaguilar","id":5833357,"node_id":"MDQ6VXNlcjU4MzMzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5833357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gaguilar","html_url":"https:\/\/github.com\/gaguilar","followers_url":"https:\/\/api.github.com\/users\/gaguilar\/followers","following_url":"https:\/\/api.github.com\/users\/gaguilar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gaguilar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gaguilar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gaguilar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gaguilar\/orgs","repos_url":"https:\/\/api.github.com\/users\/gaguilar\/repos","events_url":"https:\/\/api.github.com\/users\/gaguilar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gaguilar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-08-28T19:55:51Z","updated_at":"2020-09-03T16:34:02Z","closed_at":"2020-09-03T16:34:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nThere is a `NonMatchingChecksumError` error for the `lid_msaea` (language identification for Modern Standard Arabic - Egyptian Arabic) dataset from the LinCE benchmark due to a minor update on that dataset. \r\n\r\nHow can I update the checksum of the library to solve this issue? 
The error is below and it also appears in the [nlp viewer](https:\/\/huggingface.co\/nlp\/viewer\/?dataset=lince&config=lid_msaea):\r\n\r\n```python\r\nimport nlp\r\nnlp.load_dataset('lince', 'lid_msaea')\r\n```\r\n\r\nOutput:\r\n```\r\nNonMatchingChecksumError: ['https:\/\/ritual.uh.edu\/lince\/libaccess\/eyJ1c2VybmFtZSI6ICJodWdnaW5nZmFjZSBubHAiLCAidXNlcl9pZCI6IDExMSwgImVtYWlsIjogImR1bW15QGVtYWlsLmNvbSJ9\/lid_msaea.zip']\r\nTraceback:\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/ScriptRunner.py\", line 322, in _run_script\r\n exec(code, module.__dict__)\r\nFile \"\/home\/sasha\/nlp-viewer\/run.py\", line 196, in \r\n dts, fail = get(str(option.id), str(conf_option.name) if conf_option else None)\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/caching.py\", line 591, in wrapped_func\r\n return get_or_create_cached_value()\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/caching.py\", line 575, in get_or_create_cached_value\r\n return_value = func(*args, **kwargs)\r\nFile \"\/home\/sasha\/nlp-viewer\/run.py\", line 150, in get\r\n builder_instance.download_and_prepare()\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 432, in download_and_prepare\r\n download_config.force_download = download_mode == FORCE_REDOWNLOAD\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 469, in _download_and_prepare\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/nlp\/utils\/info_utils.py\", line 36, in verify_checksums\r\n raise NonMatchingChecksumError(str(bad_urls))\r\n```\r\n\r\nThank you in advance!\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/539\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/539\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/538","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/538\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/538\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/538\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/538","id":688015912,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc1MzU3MjY2","number":538,"title":"[logging] Add centralized logging - Bump-up cache loads to 
warnings","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-28T11:42:29Z","updated_at":"2020-08-31T11:42:51Z","closed_at":"2020-08-31T11:42:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/538","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/538","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/538.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/538.patch","merged_at":"2020-08-31T11:42:50Z"},"body":"Add a `nlp.logging` module to set the global logging level easily. The verbosity level also controls the tqdm bars (disabled when set higher than INFO).\r\n\r\nYou can use:\r\n```\r\nnlp.logging.set_verbosity(verbosity: int)\r\nnlp.logging.set_verbosity_info()\r\nnlp.logging.set_verbosity_warning()\r\nnlp.logging.set_verbosity_debug()\r\nnlp.logging.set_verbosity_error()\r\nnlp.logging.get_verbosity() -> int\r\n```\r\nAnd use the levels:\r\n```\r\nnlp.logging.CRITICAL\r\nnlp.logging.DEBUG\r\nnlp.logging.ERROR\r\nnlp.logging.FATAL\r\nnlp.logging.INFO\r\nnlp.logging.NOTSET\r\nnlp.logging.WARN\r\nnlp.logging.WARNING\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/538\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/538\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/537","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/537\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/537\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/537\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/537","id":687614699,"node_id":"MDU6SXNzdWU2ODc2MTQ2OTk=","number":537,"title":"[Dataset] RACE dataset Checksums 
error","user":{"login":"abarbosa94","id":6608232,"node_id":"MDQ6VXNlcjY2MDgyMzI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6608232?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abarbosa94","html_url":"https:\/\/github.com\/abarbosa94","followers_url":"https:\/\/api.github.com\/users\/abarbosa94\/followers","following_url":"https:\/\/api.github.com\/users\/abarbosa94\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abarbosa94\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abarbosa94\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abarbosa94\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abarbosa94\/orgs","repos_url":"https:\/\/api.github.com\/users\/abarbosa94\/repos","events_url":"https:\/\/api.github.com\/users\/abarbosa94\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abarbosa94\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2020-08-27T23:58:16Z","updated_at":"2020-09-18T12:07:04Z","closed_at":"2020-09-18T12:07:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi there, I just would like to use this awesome lib to perform a dataset fine-tuning on RACE dataset. I have performed the following steps:\r\n\r\n```\r\ndataset = nlp.load_dataset(\"race\")\r\nlen(dataset[\"train\"]), len(dataset[\"validation\"])\r\n```\r\n\r\nBut then I got the following error:\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nNonMatchingChecksumError Traceback (most recent call last)\r\n in \r\n----> 1 dataset = nlp.load_dataset(\"race\")\r\n 2 len(dataset[\"train\"]), len(dataset[\"validation\"])\r\n\r\n~\/miniconda3\/envs\/masters\/lib\/python3.8\/site-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n 546 \r\n 547 # Download and prepare data\r\n--> 548 builder_instance.download_and_prepare(\r\n 549 download_config=download_config, download_mode=download_mode, ignore_verifications=ignore_verifications,\r\n 550 )\r\n\r\n~\/miniconda3\/envs\/masters\/lib\/python3.8\/site-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n 460 logger.info(\"Dataset not on Hf google storage. 
Downloading and preparing it from source\")\r\n 461 if not downloaded_from_gcs:\r\n--> 462 self._download_and_prepare(\r\n 463 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 464 )\r\n\r\n~\/miniconda3\/envs\/masters\/lib\/python3.8\/site-packages\/nlp\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 519 # Checksums verification\r\n 520 if verify_infos:\r\n--> 521 verify_checksums(\r\n 522 self.info.download_checksums, dl_manager.get_recorded_sizes_checksums(), \"dataset source files\"\r\n 523 )\r\n\r\n~\/miniconda3\/envs\/masters\/lib\/python3.8\/site-packages\/nlp\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 36 if len(bad_urls) > 0:\r\n 37 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 38 raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 39 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 40 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['http:\/\/www.cs.cmu.edu\/~glai1\/data\/race\/RACE.tar.gz']\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/537\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/537\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/536","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/536\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/536\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/536\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/536","id":687378332,"node_id":"MDExOlB1bGxSZXF1ZXN0NDc0ODE0NzY1","number":536,"title":"Fingerprint","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-27T16:27:09Z","updated_at":"2020-08-31T14:20:40Z","closed_at":"2020-08-31T14:20:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/536","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/536","diff_url":"https:\/\/github.com\/huggingf
ace\/datasets\/pull\/536.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/536.patch","merged_at":"2020-08-31T14:20:39Z"},"body":"This PR is a continuation of #513 , in which many in-place functions were introduced or updated (cast_, flatten_) etc.\r\nHowever the caching didn't handle these changes. Indeed the caching took into account only the previous cache file name of the table, and not the possible in-place transforms of the table.\r\n\r\nTo fix that, I added the concept of dataset fingerprint, that is updated after each transform (in place or not), and stored inside the table metadata.\r\n\r\nWhen a dataset is created, an initial fingerprint is computed. If the dataset is memory-mapped, then the fingerprint generator doesn't read the table and only looks at the filename. However if the table is in-memory, then the fingerprint generator reads the content of the table using a batched non-crypto hashing.\r\n\r\nI added a utility class to compute hashes of arbitrary python objects in `fingerprint.py` : `Hasher`. The API is close to standard hashing tools (`.update`, `.hexdigest`). It also supports custom hashing functions depending on object types using a registry like pickle. I added a custom hashing function to hash a `pa.Table` in a batched way, and also for `nlp.DatasetInfo` to leverage its json serialization feature.\r\n\r\nNote about this PR:\r\nThis is a draft PR because #513 needs to be merged first.\r\nThe diff that is shown is for branches fingerprint -> indices (and not master, for now)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/536\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/536\/timeline","performed_via_github_app":null} 
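The PR description above outlines the `Hasher` utility in `fingerprint.py`: an `.update`/`.hexdigest` API close to standard hashing tools, plus a registry of custom hashing functions keyed by object type, pickle-style. Purely as an illustration of that pattern — this is not the actual `nlp.fingerprint.Hasher`; the class name, the `hashlib.md5` backend, and the pickle fallback below are assumptions made only to keep the sketch self-contained:

```python
import hashlib
import pickle


class SketchHasher:
    """Toy hasher with an .update/.hexdigest API and a per-type encoder registry (illustrative only)."""

    dispatch = {}  # maps a Python type to a function that encodes values of that type to bytes

    def __init__(self):
        self._state = hashlib.md5()  # stand-in backend; the real class uses batched non-crypto hashing

    @classmethod
    def register(cls, obj_type):
        # Decorator to plug in a custom encoder for a given type, pickle-registry style.
        def decorator(func):
            cls.dispatch[obj_type] = func
            return func
        return decorator

    def update(self, value):
        # Use a registered encoder when one exists for this type; otherwise
        # fall back to pickling the object (hypothetical fallback for this sketch).
        encoder = self.dispatch.get(type(value), pickle.dumps)
        self._state.update(encoder(value))

    def hexdigest(self):
        return self._state.hexdigest()


@SketchHasher.register(str)
def _encode_str(value):
    return value.encode("utf-8")


hasher = SketchHasher()
hasher.update("my_dataset-train.arrow")              # e.g. a cache file name
hasher.update({"transform": "map", "batched": True})  # e.g. transform parameters
print(hasher.hexdigest())                             # prints a fingerprint-like hex digest
```

The registry-style dispatch mirrors how pickle selects a reducer per type, which is what lets type-specific hashing (e.g. for `pa.Table` or `nlp.DatasetInfo`, as described in the PR body) be plugged in without special-casing the core hasher.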
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/535","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/535\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/535\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/535\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/535","id":686238315,"node_id":"MDExOlB1bGxSZXF1ZXN0NDczODM3Njg0","number":535,"title":"Benchmarks","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-26T11:21:26Z","updated_at":"2020-08-27T08:40:00Z","closed_at":"2020-08-27T08:39:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/535","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/535","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/535.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/535.patch","merged_at":"2020-08-27T08:39:59Z"},"body":"Adding some benchmarks with DVC\/CML\r\n\r\nTo add a new tracked benchmark:\r\n- create a new python benchmarking script in `.\/benchmarks\/`. 
The script can use the utilities in `.\/benchmarks\/utils.py` and should output a JSON file with results in `.\/benchmarks\/results\/`.\r\n- add a new pipeline stage in [dvc.yaml](.\/dvc.yaml) with the name of your new benchmark.\r\n\r\nThat's it","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/535\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/535\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/534","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/534\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/534\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/534\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/534","id":686115912,"node_id":"MDU6SXNzdWU2ODYxMTU5MTI=","number":534,"title":"`list_datasets()` is broken.","user":{"login":"ashutosh-dwivedi-e3502","id":314169,"node_id":"MDQ6VXNlcjMxNDE2OQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/314169?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ashutosh-dwivedi-e3502","html_url":"https:\/\/github.com\/ashutosh-dwivedi-e3502","followers_url":"https:\/\/api.github.com\/users\/ashutosh-dwivedi-e3502\/followers","following_url":"https:\/\/api.github.com\/users\/ashutosh-dwivedi-e3502\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ashutosh-dwivedi-e3502\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ashutosh-dwivedi-e3502\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ashutosh-dwivedi-e3502\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ashutosh-dwivedi-e3502\/orgs","repos_url":"https:\/\/api.github.com\/users\/ashutosh-dwivedi-e3502\/repos","events_url":"https:\/\/api.github.com\/users\/ashutosh-dwivedi-e3502\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ashutosh-dwivedi-e3502\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-08-26T08:19:01Z","updated_at":"2020-08-27T06:31:11Z","closed_at":"2020-08-27T06:31:11Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"version = '0.4.0'\r\n\r\n`list_datasets()` is broken. 
It results in the following error : \r\n\r\n```\r\nIn [3]: nlp.list_datasets()\r\nOut[3]: ---------------------------------------------------------------------------\r\nAttributeError Traceback (most recent call last)\r\n~\/.virtualenvs\/san-lgUCsFg_\/lib\/python3.8\/site-packages\/IPython\/core\/formatters.py in __call__(self, obj)\r\n 700 type_pprinters=self.type_printers,\r\n 701 deferred_pprinters=self.deferred_printers)\r\n--> 702 printer.pretty(obj)\r\n 703 printer.flush()\r\n 704 return stream.getvalue()\r\n\r\n~\/.virtualenvs\/san-lgUCsFg_\/lib\/python3.8\/site-packages\/IPython\/lib\/pretty.py in pretty(self, obj)\r\n 375 if cls in self.type_pprinters:\r\n 376 # printer registered in self.type_pprinters\r\n--> 377 return self.type_pprinters[cls](obj, self, cycle)\r\n 378 else:\r\n 379 # deferred printer\r\n\r\n~\/.virtualenvs\/san-lgUCsFg_\/lib\/python3.8\/site-packages\/IPython\/lib\/pretty.py in inner(obj, p, cycle)\r\n 553 p.text(',')\r\n 554 p.breakable()\r\n--> 555 p.pretty(x)\r\n 556 if len(obj) == 1 and type(obj) is tuple:\r\n 557 # Special case for 1-item tuples.\r\n\r\n~\/.virtualenvs\/san-lgUCsFg_\/lib\/python3.8\/site-packages\/IPython\/lib\/pretty.py in pretty(self, obj)\r\n 392 if cls is not object \\\r\n 393 and callable(cls.__dict__.get('__repr__')):\r\n--> 394 return _repr_pprint(obj, self, cycle)\r\n 395\r\n 396 return _default_pprint(obj, self, cycle)\r\n\r\n~\/.virtualenvs\/san-lgUCsFg_\/lib\/python3.8\/site-packages\/IPython\/lib\/pretty.py in _repr_pprint(obj, p, cycle)\r\n 698 \"\"\"A pprint that just redirects to the normal repr function.\"\"\"\r\n 699 # Find newlines and replace them with p.break_()\r\n--> 700 output = repr(obj)\r\n 701 lines = output.splitlines()\r\n 702 with p.group():\r\n\r\n~\/.virtualenvs\/san-lgUCsFg_\/lib\/python3.8\/site-packages\/nlp\/hf_api.py in __repr__(self)\r\n 110\r\n 111 def __repr__(self):\r\n--> 112 single_line_description = self.description.replace(\"\\n\", \"\")\r\n 113 return f\"nlp.ObjectInfo(id='{self.id}', description='{single_line_description}', files={self.siblings})\"\r\n 114\r\n\r\nAttributeError: 'NoneType' object has no attribute 'replace'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/534\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/534\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/533","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/533\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/533\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/533\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/533","id":685585914,"node_id":"MDExOlB1bGxSZXF1ZXN0NDczMjg4OTgx","number":533,"title":"Fix ArrayXD for pyarrow 0.17.1 by using non fixed length list 
arrays","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-25T15:32:44Z","updated_at":"2020-08-26T08:02:24Z","closed_at":"2020-08-26T08:02:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/533","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/533","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/533.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/533.patch","merged_at":"2020-08-26T08:02:23Z"},"body":"It should fix the CI problems in #513 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/533\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/533\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/532","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/532\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/532\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/532\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/532","id":685540614,"node_id":"MDU6SXNzdWU2ODU1NDA2MTQ=","number":532,"title":"File exists error when used with 
TPU","user":{"login":"go-inoue","id":20531705,"node_id":"MDQ6VXNlcjIwNTMxNzA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20531705?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/go-inoue","html_url":"https:\/\/github.com\/go-inoue","followers_url":"https:\/\/api.github.com\/users\/go-inoue\/followers","following_url":"https:\/\/api.github.com\/users\/go-inoue\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/go-inoue\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/go-inoue\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/go-inoue\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/go-inoue\/orgs","repos_url":"https:\/\/api.github.com\/users\/go-inoue\/repos","events_url":"https:\/\/api.github.com\/users\/go-inoue\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/go-inoue\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":21,"created_at":"2020-08-25T14:36:38Z","updated_at":"2020-09-01T12:14:56Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI'm getting a \"File exists\" error when I use [text dataset](https:\/\/github.com\/huggingface\/nlp\/tree\/master\/datasets\/text) for pre-training a RoBERTa model using `transformers` (3.0.2) and `nlp`(0.4.0) on a VM with TPU (v3-8).\r\n\r\nI modified [line 131 in the original `run_language_modeling.py`](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/language-modeling\/run_language_modeling.py#L131) as follows:\r\n\r\n```python\r\n# line 131: return LineByLineTextDataset(tokenizer=tokenizer, file_path=file_path, block_size=args.block_size)\r\ndataset = load_dataset(\"text\", data_files=file_path, split=\"train\")\r\ndataset = dataset.map(lambda ex: tokenizer(ex[\"text\"], add_special_tokens=True,\r\n truncation=True, max_length=args.block_size), batched=True)\r\ndataset.set_format(type='torch', columns=['input_ids'])\r\nreturn dataset\r\n```\r\n\r\nWhen I run this with [`xla_spawn.py`](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/xla_spawn.py), I get the following error (it produces one message per core in TPU, which I believe is fine).\r\n\r\nIt seems the current version doesn't take into account distributed training processes as in [this example](https:\/\/github.com\/huggingface\/transformers\/blob\/a573777901e662ec2e565be312ffaeedef6effec\/src\/transformers\/data\/datasets\/language_modeling.py#L35-L38)?\r\n\r\n```\r\n08\/25\/2020 13:59:41 - WARNING - nlp.builder - Using custom data configuration default\r\n08\/25\/2020 13:59:43 - INFO - nlp.builder - Generating dataset text (\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d)\r\n08\/25\/2020 13:59:43 - INFO - nlp.builder - Generating dataset text (\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d)\r\n08\/25\/2020 13:59:43 - INFO - nlp.builder - Generating dataset text (\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d)\r\n08\/25\/2020 13:59:43 - INFO - nlp.builder - Generating dataset text 
(\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d)\r\n08\/25\/2020 13:59:43 - INFO - nlp.builder - Generating dataset text (\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d)\r\n08\/25\/2020 13:59:43 - INFO - nlp.builder - Generating dataset text (\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d)\r\n08\/25\/2020 13:59:43 - INFO - nlp.builder - Generating dataset text (\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d)\r\n08\/25\/2020 13:59:43 - INFO - nlp.builder - Generating dataset text (\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d)\r\nDownloading and preparing dataset text\/default-b0932b2bdbb63283 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/\r\n447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d...\r\nDownloading and preparing dataset text\/default-b0932b2bdbb63283 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/\r\n447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d...\r\nDownloading and preparing dataset text\/default-b0932b2bdbb63283 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/\r\n447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d...\r\nDownloading and preparing dataset text\/default-b0932b2bdbb63283 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/\r\n447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d...\r\nDownloading and preparing dataset text\/default-b0932b2bdbb63283 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/\r\n447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d...\r\nDownloading and preparing dataset text\/default-b0932b2bdbb63283 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/\r\n447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d...\r\nException in device=TPU:6: [Errno 17] File exists: '\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\nException in device=TPU:4: [Errno 17] File exists: '\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\nException in device=TPU:1: [Errno 17] File exists: 
'\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\nDownloading and preparing dataset text\/default-b0932b2bdbb63283 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/\r\n447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d...\r\nException in device=TPU:7: [Errno 17] File exists: '\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\nException in device=TPU:3: [Errno 17] File exists: '\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\nDownloading and preparing dataset text\/default-b0932b2bdbb63283 (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/\r\n447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d...\r\nException in device=TPU:2: [Errno 17] File exists: '\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\nException in device=TPU:0: [Errno 17] File exists: '\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\nTraceback (most recent call last):\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/torch_xla\/distributed\/xla_multiprocessing.py\", line 231, in _start_fn\r\n fn(gindex, *args)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/torch_xla\/distributed\/xla_multiprocessing.py\", line 231, in _start_fn\r\n fn(gindex, *args)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/torch_xla\/distributed\/xla_multiprocessing.py\", line 231, in _start_fn\r\n fn(gindex, *args)\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 300, in _mp_fn\r\n main()\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 300, in _mp_fn\r\n main()\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 300, in _mp_fn\r\n main()\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 240, in main\r\n train_dataset = get_dataset(data_args, tokenizer=tokenizer) if training_args.do_train else None\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 240, in main\r\n train_dataset = get_dataset(data_args, tokenizer=tokenizer) if training_args.do_train else None\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 240, in main\r\n train_dataset = get_dataset(data_args, tokenizer=tokenizer) if training_args.do_train else None\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 134, in get_dataset\r\n dataset = load_dataset(\"text\", data_files=file_path, split=\"train\")\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/load.py\", line 546, in load_dataset\r\n download_config=download_config, download_mode=download_mode, ignore_verifications=ignore_verifications,\r\n File 
\"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 134, in get_dataset\r\n dataset = load_dataset(\"text\", data_files=file_path, split=\"train\")\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 134, in get_dataset\r\n dataset = load_dataset(\"text\", data_files=file_path, split=\"train\")\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 450, in download_and_prepare\r\n with incomplete_dir(self._cache_dir) as tmp_data_dir:\r\nTraceback (most recent call last):\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/load.py\", line 546, in load_dataset\r\n download_config=download_config, download_mode=download_mode, ignore_verifications=ignore_verifications,\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/contextlib.py\", line 81, in __enter__\r\n return next(self.gen)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/load.py\", line 546, in load_dataset\r\n download_config=download_config, download_mode=download_mode, ignore_verifications=ignore_verifications,\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/torch_xla\/distributed\/xla_multiprocessing.py\", line 231, in _start_fn\r\n fn(gindex, *args)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 450, in download_and_prepare\r\n with incomplete_dir(self._cache_dir) as tmp_data_dir:\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 422, in incomplete_dir\r\n os.makedirs(tmp_dir)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 450, in download_and_prepare\r\n with incomplete_dir(self._cache_dir) as tmp_data_dir:\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 300, in _mp_fn\r\n main()\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/contextlib.py\", line 81, in __enter__\r\n return next(self.gen)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/os.py\", line 220, in makedirs\r\n mkdir(name, mode)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/contextlib.py\", line 81, in __enter__\r\n return next(self.gen)\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 240, in main\r\n train_dataset = get_dataset(data_args, tokenizer=tokenizer) if training_args.do_train else None\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 422, in incomplete_dir\r\n os.makedirs(tmp_dir)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/torch_xla\/distributed\/xla_multiprocessing.py\", line 231, in _start_fn\r\n fn(gindex, *args)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 422, in incomplete_dir\r\n os.makedirs(tmp_dir)\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 134, in get_dataset\r\n dataset = load_dataset(\"text\", data_files=file_path, split=\"train\")\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/os.py\", line 220, in makedirs\r\n mkdir(name, mode)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/load.py\", line 546, in load_dataset\r\n download_config=download_config, download_mode=download_mode, ignore_verifications=ignore_verifications,\r\nFileExistsError: [Errno 17] File exists: 
'\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 300, in _mp_fn\r\n main()\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 450, in download_and_prepare\r\n with incomplete_dir(self._cache_dir) as tmp_data_dir:\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/os.py\", line 220, in makedirs\r\n mkdir(name, mode)\r\nFileExistsError: [Errno 17] File exists: '\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 240, in main\r\n train_dataset = get_dataset(data_args, tokenizer=tokenizer) if training_args.do_train else None\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/contextlib.py\", line 81, in __enter__\r\n return next(self.gen)\r\nFileExistsError: [Errno 17] File exists: '\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 134, in get_dataset\r\n dataset = load_dataset(\"text\", data_files=file_path, split=\"train\")\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 422, in incomplete_dir\r\n os.makedirs(tmp_dir)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/load.py\", line 546, in load_dataset\r\n download_config=download_config, download_mode=download_mode, ignore_verifications=ignore_verifications,\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/os.py\", line 220, in makedirs\r\n mkdir(name, mode)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 450, in download_and_prepare\r\n with incomplete_dir(self._cache_dir) as tmp_data_dir:\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/contextlib.py\", line 81, in __enter__\r\n return next(self.gen)\r\nFileExistsError: [Errno 17] File exists: '\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 422, in incomplete_dir\r\n os.makedirs(tmp_dir)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/os.py\", line 220, in makedirs\r\n mkdir(name, mode)\r\nTraceback (most recent call last):\r\nFileExistsError: [Errno 17] File exists: '\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\nTraceback (most recent call last):\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/torch_xla\/distributed\/xla_multiprocessing.py\", line 231, in _start_fn\r\n fn(gindex, *args)\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 300, in _mp_fn\r\n main()\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 240, in main\r\n train_dataset = get_dataset(data_args, tokenizer=tokenizer) if training_args.do_train else None\r\n File 
\"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 134, in get_dataset\r\n dataset = load_dataset(\"text\", data_files=file_path, split=\"train\")\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/torch_xla\/distributed\/xla_multiprocessing.py\", line 231, in _start_fn\r\n fn(gindex, *args)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/load.py\", line 546, in load_dataset\r\n download_config=download_config, download_mode=download_mode, ignore_verifications=ignore_verifications,\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 450, in download_and_prepare\r\n with incomplete_dir(self._cache_dir) as tmp_data_dir:\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 300, in _mp_fn\r\n main()\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 240, in main\r\n train_dataset = get_dataset(data_args, tokenizer=tokenizer) if training_args.do_train else None\r\n File \"\/home\/*****\/huggingface_roberta\/run_language_modeling.py\", line 134, in get_dataset\r\n dataset = load_dataset(\"text\", data_files=file_path, split=\"train\")\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/load.py\", line 546, in load_dataset\r\n download_config=download_config, download_mode=download_mode, ignore_verifications=ignore_verifications,\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 450, in download_and_prepare\r\n with incomplete_dir(self._cache_dir) as tmp_data_dir:\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/contextlib.py\", line 81, in __enter__\r\n return next(self.gen)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/site-packages\/nlp\/builder.py\", line 422, in incomplete_dir\r\n os.makedirs(tmp_dir)\r\n File \"\/anaconda3\/envs\/torch-xla-1.6\/lib\/python3.6\/os.py\", line 220, in makedirs\r\n mkdir(name, mode)\r\nFileExistsError: [Errno 17] File exists: '\/home\/*****\/.cache\/huggingface\/datasets\/text\/default-b0932b2bdbb63283\/0.0.0\/447f2bcfa2a721a37bc8fdf23800eade1523cf07f7eada6fe661fe4d070d380d.incomplete'\r\n```\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/532\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/532\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/531","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/531\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/531\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/531\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/531","id":685291036,"node_id":"MDExOlB1bGxSZXF1ZXN0NDczMDM4ODc4","number":531,"title":"add concatenate_datasets to the 
docs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-25T08:40:05Z","updated_at":"2020-08-25T09:02:20Z","closed_at":"2020-08-25T09:02:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/531","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/531","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/531.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/531.patch","merged_at":"2020-08-25T09:02:19Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/531\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/531\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/530","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/530\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/530\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/530\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/530","id":684825612,"node_id":"MDExOlB1bGxSZXF1ZXN0NDcyNjQ5NTk2","number":530,"title":"use ragged tensor by 
default","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-08-24T17:06:15Z","updated_at":"2021-10-22T19:38:40Z","closed_at":"2020-08-24T19:22:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/530","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/530","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/530.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/530.patch","merged_at":"2020-08-24T19:22:25Z"},"body":"I think it's better if it's clear whether the returned tensor is ragged or not when the type is set to tensorflow.\r\nPreviously it was a tensor (not ragged) if numpy could stack the output (which can change depending on the batch of example you take), which make things difficult to handle, as it may sometimes return a ragged tensor and sometimes not.\r\n\r\nTherefore I reverted this behavior to always return a ragged tensor as we used to do.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/530\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/530\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/529","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/529\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/529\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/529\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/529","id":684797157,"node_id":"MDExOlB1bGxSZXF1ZXN0NDcyNjI2MDY4","number":529,"title":"Add 
MLSUM","user":{"login":"RachelKer","id":36986299,"node_id":"MDQ6VXNlcjM2OTg2Mjk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36986299?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RachelKer","html_url":"https:\/\/github.com\/RachelKer","followers_url":"https:\/\/api.github.com\/users\/RachelKer\/followers","following_url":"https:\/\/api.github.com\/users\/RachelKer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RachelKer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RachelKer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RachelKer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RachelKer\/orgs","repos_url":"https:\/\/api.github.com\/users\/RachelKer\/repos","events_url":"https:\/\/api.github.com\/users\/RachelKer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RachelKer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-08-24T16:18:35Z","updated_at":"2020-08-26T08:04:11Z","closed_at":"2020-08-26T08:04:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/529","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/529","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/529.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/529.patch","merged_at":"2020-08-26T08:04:10Z"},"body":"Hello (again :) !), \r\n\r\nSo, I started a new branch because of a [rebase issue](https:\/\/github.com\/huggingface\/nlp\/pull\/463), sorry for the mess. \r\n\r\nHowever, the command `pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_real_dataset_mlsum` still fails because there is no default language dataset : the script throws an error as a specific config language is necessary. \r\n\r\nI think that setting a default language would be a bad workaround for this so I kept it as it is. Putting all the train files across languages together would also be a bad idea because of the size. 
\r\n\r\nThanks for your help, \r\n\r\nRachel\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/529\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/529\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/528","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/528\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/528\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/528\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/528","id":684673673,"node_id":"MDExOlB1bGxSZXF1ZXN0NDcyNTIzNDI1","number":528,"title":"fix missing variable names in docs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-24T13:31:48Z","updated_at":"2020-08-25T09:04:04Z","closed_at":"2020-08-25T09:04:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/528","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/528","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/528.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/528.patch","merged_at":"2020-08-25T09:04:03Z"},"body":"fix #524 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/528\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/528\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/527","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/527\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/527\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/527\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/527","id":684632930,"node_id":"MDExOlB1bGxSZXF1ZXN0NDcyNDg4MzUy","number":527,"title":"Fix config used for slow test on real 
dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-24T12:39:34Z","updated_at":"2020-08-25T09:20:45Z","closed_at":"2020-08-25T09:20:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/527","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/527","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/527.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/527.patch","merged_at":"2020-08-25T09:20:44Z"},"body":"As noticed in #470, #474, #476, #504 , the slow test `test_load_real_dataset` couldn't run on datasets that require config parameters.\r\n\r\nTo fix that I replaced it with one test with the first config of BUILDER_CONFIGS `test_load_real_dataset`, and another test that runs all of the configs in BUILDER_CONFIGS `test_load_real_dataset_all_configs`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/527\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/527\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/526","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/526\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/526\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/526\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/526","id":684615455,"node_id":"MDExOlB1bGxSZXF1ZXN0NDcyNDczNjcw","number":526,"title":"Returning None instead of \"python\" if dataset is 
unformatted","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-24T12:10:35Z","updated_at":"2020-08-24T12:50:43Z","closed_at":"2020-08-24T12:50:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/526","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/526","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/526.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/526.patch","merged_at":"2020-08-24T12:50:42Z"},"body":"Following the discussion on Slack, this small fix ensures that calling `dataset.set_format(type=dataset.format[\"type\"])` works properly. 
Slightly breaking as calling `dataset.format` when the dataset is unformatted will return `None` instead of `python`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/526\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/526\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/525","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/525\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/525\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/525\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/525","id":683875483,"node_id":"MDU6SXNzdWU2ODM4NzU0ODM=","number":525,"title":"wmt download speed example","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-08-21T23:29:06Z","updated_at":"2021-02-18T16:16:07Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Continuing from the slack 1.0 roadmap thread w @lhoestq , I realized the slow downloads is only a thing sometimes. Here are a few examples, I suspect there are multiple issues. All commands were run from the same gcp us-central-1f machine.\r\n\r\n```\r\nimport nlp\r\nnlp.load_dataset('wmt16', 'de-en')\r\n```\r\nDownloads at 49.1 KB\/S\r\n\r\nWhereas \r\n```\r\npip install gdown # download from google drive\r\n!gdown https:\/\/drive.google.com\/uc?id=1iO7um-HWoNoRKDtw27YUSgyeubn9uXqj\r\n```\r\nDownloads at 127 MB\/s. (The file is a copy of wmt-en-de raw).\r\n\r\n\r\n```\r\nnlp.load_dataset('wmt16', 'ro-en')\r\n```\r\ngoes at 27 MB\/s, much faster. \r\n\r\nif we wget the same data from s3 is the same download speed, but \u00bc the file size:\r\n```\r\nwget https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/translation\/wmt_en_ro_packed_200_rand.tgz\r\n```\r\n\r\nFinally,\r\n```\r\nnlp.load_dataset('wmt19', 'zh-en')\r\n```\r\nStarts fast, but broken. 
(duplicate of #493 )\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/525\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/525\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/524","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/524\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/524\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/524\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/524","id":683686359,"node_id":"MDU6SXNzdWU2ODM2ODYzNTk=","number":524,"title":"Some docs are missing parameter names","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-21T16:47:34Z","updated_at":"2020-08-25T09:04:03Z","closed_at":"2020-08-25T09:04:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"See https:\/\/huggingface.co\/nlp\/master\/package_reference\/main_classes.html#nlp.Dataset.map. 
I believe this is because the parameter names are enclosed in backticks in the docstrings, maybe it's an old docstring format that doesn't work with the current Sphinx version.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/524\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/524\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/523","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/523\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/523\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/523\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/523","id":682573232,"node_id":"MDExOlB1bGxSZXF1ZXN0NDcwNzkxMjA1","number":523,"title":"Speed up Tokenization by optimizing cast_to_python_objects","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-20T09:42:02Z","updated_at":"2020-08-24T08:54:15Z","closed_at":"2020-08-24T08:54:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/523","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/523","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/523.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/523.patch","merged_at":"2020-08-24T08:54:14Z"},"body":"I changed how `cast_to_python_objects` works to make it faster.\r\nIt is used to cast numpy\/pytorch\/tensorflow\/pandas objects to python lists, and it works recursively.\r\n\r\nTo avoid iterating over possibly long lists, it first checks if the first element that is not None has to be casted.\r\nIf the first element needs to be casted, then all the elements of the list will be casted, otherwise they'll stay the same.\r\nThis trick allows to cast objects that contain tokenizers outputs without iterating over every single token for example.\r\n\r\nSpeed improvement:\r\n\r\n\r\n```python\r\nimport transformers\r\nimport nlp\r\n\r\ntok = transformers.BertTokenizerFast.from_pretrained(\"bert-base-uncased\")\r\ntxt = [\"a \" * 512] * 1000\r\ndataset = nlp.Dataset.from_dict({\"txt\": txt})\r\n\r\n# 
Tokenization using .map is now faster. Previously it was taking 3.5s\r\n%time _ = dataset.map(lambda x: tok(x[\"txt\"]), batched=True, load_from_cache_file=False)\r\n# 450ms\r\n\r\n# for comparison\r\n%time _ = tok(txt)\r\n# 280ms\r\n\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/523\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/523\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/522","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/522\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/522\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/522\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/522","id":682478833,"node_id":"MDU6SXNzdWU2ODI0Nzg4MzM=","number":522,"title":"dictionnary typo in docs","user":{"login":"yonigottesman","id":4004127,"node_id":"MDQ6VXNlcjQwMDQxMjc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4004127?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yonigottesman","html_url":"https:\/\/github.com\/yonigottesman","followers_url":"https:\/\/api.github.com\/users\/yonigottesman\/followers","following_url":"https:\/\/api.github.com\/users\/yonigottesman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yonigottesman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yonigottesman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yonigottesman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yonigottesman\/orgs","repos_url":"https:\/\/api.github.com\/users\/yonigottesman\/repos","events_url":"https:\/\/api.github.com\/users\/yonigottesman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yonigottesman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-20T07:11:05Z","updated_at":"2020-08-20T07:52:14Z","closed_at":"2020-08-20T07:52:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Many places dictionary is spelled dictionnary, not sure if its on purpose or not.\r\nFixed in this pr: \r\nhttps:\/\/github.com\/huggingface\/nlp\/pull\/521 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/522\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/522\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/521","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/521\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/521\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/521\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/521","id":682477648,"node_id":"MDExOlB1bGxSZXF1ZXN0NDcwNzEyNzgz","number":521,"title":"Fix dictionnary (dictionary) typo","user":{"login":"yonigottesman","id":4004127,"node_id":"MDQ6VXNlcjQwMDQxMjc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4004127?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yonigottesman","html_url":"https:\/\/github.com\/yonigottesman","followers_url":"https:\/\/api.github.com\/users\/yonigottesman\/followers","following_url":"https:\/\/api.github.com\/users\/yonigottesman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yonigottesman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yonigottesman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yonigottesman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yonigottesman\/orgs","repos_url":"https:\/\/api.github.com\/users\/yonigottesman\/repos","events_url":"https:\/\/api.github.com\/users\/yonigottesman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yonigottesman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-20T07:09:02Z","updated_at":"2020-08-20T07:52:04Z","closed_at":"2020-08-20T07:52:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/521","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/521","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/521.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/521.patch","merged_at":"2020-08-20T07:52:04Z"},"body":"This error happens many times I'm thinking maybe its spelled like this on purpose?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/521\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/521\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/520","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/520\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/520\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/520\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/520","id":682264839,"node_id":"MDExOlB1bGxSZXF1ZXN0NDcwNTI4MDE0","number":520,"title":"Transform references for 
sacrebleu","user":{"login":"jbragg","id":2238344,"node_id":"MDQ6VXNlcjIyMzgzNDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2238344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jbragg","html_url":"https:\/\/github.com\/jbragg","followers_url":"https:\/\/api.github.com\/users\/jbragg\/followers","following_url":"https:\/\/api.github.com\/users\/jbragg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jbragg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jbragg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jbragg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jbragg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jbragg\/repos","events_url":"https:\/\/api.github.com\/users\/jbragg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jbragg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-20T00:26:55Z","updated_at":"2020-08-20T09:30:54Z","closed_at":"2020-08-20T09:30:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/520","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/520","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/520.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/520.patch","merged_at":"2020-08-20T09:30:53Z"},"body":"Currently it is impossible to use sacrebleu when len(predictions) != the number of references per prediction (very uncommon), due to a strange format expected by sacrebleu. If one passes in the data to `nlp.metric.compute()` in sacrebleu format, `nlp` throws an error due to mismatching lengths between predictions and references. 
If one uses a more standard format where predictions and references are lists of the same length, sacrebleu throws an error.\r\n\r\nThis PR transforms reference data in a more standard format into the [unusual format](https:\/\/github.com\/mjpost\/sacreBLEU#using-sacrebleu-from-python) expected by sacrebleu.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/520\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/520\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/519","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/519\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/519\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/519\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/519","id":682193882,"node_id":"MDU6SXNzdWU2ODIxOTM4ODI=","number":519,"title":"[BUG] Metrics throwing new error on master since 0.4.0","user":{"login":"jbragg","id":2238344,"node_id":"MDQ6VXNlcjIyMzgzNDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2238344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jbragg","html_url":"https:\/\/github.com\/jbragg","followers_url":"https:\/\/api.github.com\/users\/jbragg\/followers","following_url":"https:\/\/api.github.com\/users\/jbragg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jbragg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jbragg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jbragg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jbragg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jbragg\/repos","events_url":"https:\/\/api.github.com\/users\/jbragg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jbragg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-19T21:29:15Z","updated_at":"2020-08-19T22:04:40Z","closed_at":"2020-08-19T22:04:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The following error occurs when passing in references of type `List[List[str]]` to metrics like bleu.\r\nWasn't happening on 0.4.0 but happening now on master.\r\n\r\n```\r\n File \"\/usr\/local\/lib\/python3.7\/site-packages\/nlp\/metric.py\", line 226, in compute\r\n self.add_batch(predictions=predictions, references=references)\r\n File \"\/usr\/local\/lib\/python3.7\/site-packages\/nlp\/metric.py\", line 242, in add_batch\r\n batch = self.info.features.encode_batch(batch)\r\n File \"\/usr\/local\/lib\/python3.7\/site-packages\/nlp\/features.py\", line 527, in encode_batch\r\n encoded_batch[key] = [encode_nested_example(self[key], cast_to_python_objects(obj)) for obj in column]\r\n File \"\/usr\/local\/lib\/python3.7\/site-packages\/nlp\/features.py\", line 527, in \r\n encoded_batch[key] = [encode_nested_example(self[key], cast_to_python_objects(obj)) for obj in column]\r\n File \"\/usr\/local\/lib\/python3.7\/site-packages\/nlp\/features.py\", line 456, in encode_nested_example\r\n raise 
ValueError(\"Got a string but expected a list instead: '{}'\".format(obj))\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/519\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/519\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/518","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/518\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/518\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/518\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/518","id":682131165,"node_id":"MDExOlB1bGxSZXF1ZXN0NDcwNDE0ODE1","number":518,"title":"[METRICS, breaking] Refactor caching behavior, pickle\/cloudpickle metrics and dataset, add tests on metrics","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-19T19:43:08Z","updated_at":"2020-08-24T16:01:40Z","closed_at":"2020-08-24T16:01:39Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/518","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/518","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/518.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/518.patch","merged_at":"2020-08-24T16:01:39Z"},"body":"Move the acquisition of the filelock at a later stage during metrics processing so it can be pickled\/cloudpickled after instantiation.\r\n\r\nAlso add some tests on pickling, concurrent but separate metric instances and concurrent and distributed metric instances.\r\n\r\nChanges significantly the caching behavior for the metrics:\r\n- if the metric is used in a non-distributed setup (most common case) we try to find a free cache file using UUID instead of asking for an `experiment_id` if we can't lock the cache file this allows to use several instances of the same metrics in parallel.\r\n- if the metrics is used in a distributed setup we ask for an `experiment_id` if we can't lock the cache file (because all the nodes need to have related cache file names for the final sync.\r\n- after the computation, we free the locks and delete 
all the cache files.\r\n\r\nBreaking: Some arguments for Metrics initialization have been removed for simplicity (`version`...) and some have been renamed for consistency with the rest of the library (`in_memory` => `keep_in_memory`).\r\n\r\nAlso remove the `_has_transformers` detection in utils to avoid importing transformers everytime during loading.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/518\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/518\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/517","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/517\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/517\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/517\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/517","id":681896944,"node_id":"MDU6SXNzdWU2ODE4OTY5NDQ=","number":517,"title":"add MLDoc dataset","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-19T14:41:59Z","updated_at":"2021-08-03T05:59:33Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI am recommending that someone add MLDoc, a multilingual news topic classification dataset.\r\n\r\n- Here's a link to the Github: https:\/\/github.com\/facebookresearch\/MLDoc\r\n- and the paper: http:\/\/www.lrec-conf.org\/proceedings\/lrec2018\/pdf\/658.pdf\r\n\r\nLooks like the dataset contains news stories in multiple languages that can be classified into four hierarchical groups: CCAT (Corporate\/Industrial), ECAT (Economics), GCAT (Government\/Social) and MCAT (Markets). 
There are 13 languages: Dutch, French, German, Chinese, Japanese, Russian, Portuguese, Spanish, Latin American Spanish, Italian, Danish, Norwegian, and Swedish","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/517\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/517\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/516","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/516\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/516\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/516\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/516","id":681846032,"node_id":"MDExOlB1bGxSZXF1ZXN0NDcwMTY5NTA0","number":516,"title":"[Breaking] Rename formated to formatted","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-19T13:35:23Z","updated_at":"2020-08-20T08:41:17Z","closed_at":"2020-08-20T08:41:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/516","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/516","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/516.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/516.patch","merged_at":"2020-08-20T08:41:16Z"},"body":"`formated` is not correct but `formatted` is","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/516\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/516\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/515","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/515\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/515\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/515\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/515","id":681845619,"node_id":"MDExOlB1bGxSZXF1ZXN0NDcwMTY5MTQ0","number":515,"title":"Fix batched map for formatted dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-19T13:34:50Z","updated_at":"2020-08-20T20:30:43Z","closed_at":"2020-08-20T20:30:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/515","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/515","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/515.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/515.patch","merged_at":"2020-08-20T20:30:42Z"},"body":"If you had a dataset formatted as numpy for example, and tried to do a batched map, then it would crash because one of the elements from the inputs was missing for unchanged columns (ex: batch of length 999 instead of 1000).\r\nThe happened during the creation of the `pa.Table`, since columns had different lengths.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/515\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/515\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/514","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/514\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/514\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/514\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/514","id":681256348,"node_id":"MDU6SXNzdWU2ODEyNTYzNDg=","number":514,"title":"dataset.shuffle(keep_in_memory=True) is never 
allowed","user":{"login":"vegarab","id":24683907,"node_id":"MDQ6VXNlcjI0NjgzOTA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24683907?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vegarab","html_url":"https:\/\/github.com\/vegarab","followers_url":"https:\/\/api.github.com\/users\/vegarab\/followers","following_url":"https:\/\/api.github.com\/users\/vegarab\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vegarab\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vegarab\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vegarab\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vegarab\/orgs","repos_url":"https:\/\/api.github.com\/users\/vegarab\/repos","events_url":"https:\/\/api.github.com\/users\/vegarab\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vegarab\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-08-18T18:47:40Z","updated_at":"2021-07-23T18:07:11Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As of commit ef4aac2, the usage of the parameter `keep_in_memory=True` is never possible: `dataset.select(keep_in_memory=True)`\r\n\r\nThe commit added the lines\r\n```python\r\n# lines 994-996 in src\/nlp\/arrow_dataset.py\r\n assert (\r\n not keep_in_memory or cache_file_name is None\r\n ), \"Please use either `keep_in_memory` or `cache_file_name` but not both.\"\r\n```\r\n\r\nThis affects both `shuffle()` as `select()` is a sub-routine, and `map()` that has the same check. \r\n\r\nI'd love to fix this myself, but unsure what the intention of the assert is given the rest of the logic in the function concerning `ccache_file_name` and `keep_in_memory`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/514\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/514\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/513","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/513\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/513\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/513\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/513","id":681215612,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY5NjQxMjg1","number":513,"title":"[speedup] Use indices mappings instead of deepcopy for all the samples reordering 
methods","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-08-18T17:36:02Z","updated_at":"2020-08-28T08:41:51Z","closed_at":"2020-08-28T08:41:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/513","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/513","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/513.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/513.patch","merged_at":"2020-08-28T08:41:50Z"},"body":"Use an indices mapping instead of rewriting the dataset for all the samples re-ordering\/selection methods (`select`, `sort`, `shuffle`, `shard`, `train_test_split`).\r\n\r\nAdded a `flatten_indices` method which copy the dataset to a new table to remove the indices mapping with tests.\r\n\r\nAll the samples re-ordering\/selection methods should be a lot faster. The downside is that iterating on very large batch of the dataset might be a little slower when we have changed the order of the samples since with in these case we use `pyarrow.Table.take` instead of `pyarrow.Table.slice`. 
There is no free lunch but the speed of iterating over the dataset is rarely the bottleneck.\r\n\r\n*Backward breaking change*: the `cache_file_name` argument in all the samples re-ordering\/selection methods (`select`, `sort`, `shuffle`, `shard`, `train_test_split`) is now called `indices_cache_file_name` on purpose to make it explicit to the user that this caching file is used for caching the indices mapping and not the dataset itself.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/513\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/513\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/512","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/512\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/512\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/512\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/512","id":681137164,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY5NTc2NzE3","number":512,"title":"Delete CONTRIBUTING.md","user":{"login":"ChenZehong13","id":56394989,"node_id":"MDQ6VXNlcjU2Mzk0OTg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56394989?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ChenZehong13","html_url":"https:\/\/github.com\/ChenZehong13","followers_url":"https:\/\/api.github.com\/users\/ChenZehong13\/followers","following_url":"https:\/\/api.github.com\/users\/ChenZehong13\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ChenZehong13\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ChenZehong13\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ChenZehong13\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ChenZehong13\/orgs","repos_url":"https:\/\/api.github.com\/users\/ChenZehong13\/repos","events_url":"https:\/\/api.github.com\/users\/ChenZehong13\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ChenZehong13\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-18T15:33:25Z","updated_at":"2020-08-18T15:48:21Z","closed_at":"2020-08-18T15:39:07Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/512","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/512","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/512.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/512.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/512\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/512\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/511","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/511\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/511\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/511\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/511","id":681055553,"node_id":"MDU6SXNzdWU2ODEwNTU1NTM=","number":511,"title":"dataset.shuffle() and select() resets format. Intended?","user":{"login":"vegarab","id":24683907,"node_id":"MDQ6VXNlcjI0NjgzOTA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24683907?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vegarab","html_url":"https:\/\/github.com\/vegarab","followers_url":"https:\/\/api.github.com\/users\/vegarab\/followers","following_url":"https:\/\/api.github.com\/users\/vegarab\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vegarab\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vegarab\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vegarab\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vegarab\/orgs","repos_url":"https:\/\/api.github.com\/users\/vegarab\/repos","events_url":"https:\/\/api.github.com\/users\/vegarab\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vegarab\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-08-18T13:46:01Z","updated_at":"2020-09-14T08:45:38Z","closed_at":"2020-09-14T08:45:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Calling `dataset.shuffle()` or `dataset.select()` on a dataset resets its format set by `dataset.set_format()`. Is this intended or an oversight?\r\n\r\nWhen working on quite large datasets that require a lot of preprocessing I find it convenient to save the processed dataset to file using `torch.save(\"dataset.pt\")`. Later loading the dataset object using `torch.load(\"dataset.pt\")`, which conserves the defined format before saving. \r\nI do shuffling and selecting (for controlling dataset size) after loading the data from .pt-file, as it's convenient whenever you train multiple models with varying sizes of the same dataset. \r\n\r\nThe obvious workaround for this is to set the format again after using `dataset.select()` or `dataset.shuffle()`.\r\n\r\n_I guess this is more of a discussion on the design philosophy of the functions. Please let me know if this is not the right channel for these kinds of discussions or if they are not wanted at all!_\r\n\r\n#### How to reproduce:\r\n\r\n```python\r\nimport nlp\r\nfrom transformers import T5Tokenizer\r\n\r\ntokenizer = T5Tokenizer.from_pretrained(\"t5-base\")\r\ndef create_features(batch):\r\n context_encoding = tokenizer.batch_encode_plus(batch[\"context\"])\r\n return {\"input_ids\": context_encoding[\"input_ids\"]}\r\n\r\ndataset = nlp.load_dataset(\"cosmos_qa\", split=\"train\")\r\ndataset = dataset.map(create_features, batched=True)\r\ndataset.set_format(type=\"torch\", columns=[\"input_ids\"])\r\ndataset[0]\r\n# {'input_ids': tensor([ 1804, 3525, 1602, ... 
0, 0])}\r\n\r\ndataset = dataset.shuffle()\r\ndataset[0]\r\n# {'id': '3Q9(...)20', 'context': \"Good Old War an (...) play ?', 'answer0': 'None of the above choices .', 'answer1': 'This person likes music and likes to see the show , they will see other bands play .', (...) 'input_ids': [1804, 3525, 1602, ... , 0, 0]}\r\n\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/511\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/511\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/510","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/510\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/510\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/510\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/510","id":680823644,"node_id":"MDU6SXNzdWU2ODA4MjM2NDQ=","number":510,"title":"Version of numpy to use the library","user":{"login":"isspek","id":6966175,"node_id":"MDQ6VXNlcjY5NjYxNzU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6966175?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/isspek","html_url":"https:\/\/github.com\/isspek","followers_url":"https:\/\/api.github.com\/users\/isspek\/followers","following_url":"https:\/\/api.github.com\/users\/isspek\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/isspek\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/isspek\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/isspek\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/isspek\/orgs","repos_url":"https:\/\/api.github.com\/users\/isspek\/repos","events_url":"https:\/\/api.github.com\/users\/isspek\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/isspek\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-18T08:59:13Z","updated_at":"2020-08-19T18:35:56Z","closed_at":"2020-08-19T18:35:56Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Thank you so much for your excellent work! I would like to use nlp library in my project. While importing nlp, I am receiving the following error `AttributeError: module 'numpy.random' has no attribute 'Generator'` Numpy version in my project is 1.16.0. 
May I learn which numpy version is used for the nlp library.\r\n\r\nThanks in advance.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/510\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/510\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/509","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/509\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/509\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/509\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/509","id":679711585,"node_id":"MDU6SXNzdWU2Nzk3MTE1ODU=","number":509,"title":"Converting TensorFlow dataset example","user":{"login":"saareliad","id":22762845,"node_id":"MDQ6VXNlcjIyNzYyODQ1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22762845?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/saareliad","html_url":"https:\/\/github.com\/saareliad","followers_url":"https:\/\/api.github.com\/users\/saareliad\/followers","following_url":"https:\/\/api.github.com\/users\/saareliad\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/saareliad\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/saareliad\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/saareliad\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/saareliad\/orgs","repos_url":"https:\/\/api.github.com\/users\/saareliad\/repos","events_url":"https:\/\/api.github.com\/users\/saareliad\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/saareliad\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-16T08:05:20Z","updated_at":"2021-08-03T06:01:18Z","closed_at":"2021-08-03T06:01:17Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\nI want to use TensorFlow datasets with this repo, I noticed you made some conversion script,\r\ncan you give a simple example of using it?\r\n\r\nThanks\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/509\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/509\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/508","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/508\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/508\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/508\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/508","id":679705734,"node_id":"MDU6SXNzdWU2Nzk3MDU3MzQ=","number":508,"title":"TypeError: Receiver() takes no 
arguments","user":{"login":"sebastiantomac","id":1225851,"node_id":"MDQ6VXNlcjEyMjU4NTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1225851?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sebastiantomac","html_url":"https:\/\/github.com\/sebastiantomac","followers_url":"https:\/\/api.github.com\/users\/sebastiantomac\/followers","following_url":"https:\/\/api.github.com\/users\/sebastiantomac\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sebastiantomac\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sebastiantomac\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sebastiantomac\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sebastiantomac\/orgs","repos_url":"https:\/\/api.github.com\/users\/sebastiantomac\/repos","events_url":"https:\/\/api.github.com\/users\/sebastiantomac\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sebastiantomac\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-08-16T07:18:16Z","updated_at":"2020-09-01T14:53:33Z","closed_at":"2020-09-01T14:49:03Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to load a wikipedia data set\r\n\r\n```\r\nimport nlp\r\nfrom nlp import load_dataset\r\n\r\ndataset = load_dataset(\"wikipedia\", \"20200501.en\", split=\"train\", cache_dir=data_path, beam_runner='DirectRunner')\r\n#dataset = load_dataset('wikipedia', '20200501.sv', cache_dir=data_path, beam_runner='DirectRunner')\r\n```\r\n\r\nThis fails in the apache beam runner. \r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"D:\/ML\/wikiembedding\/gpt2_sv.py\", line 36, in \r\n dataset = load_dataset(\"wikipedia\", \"20200501.en\", split=\"train\", cache_dir=my_cache_dir, beam_runner='DirectRunner')\r\n File \"C:\\Users\\seto\\AppData\\Local\\Programs\\Python\\Python38\\lib\\site-packages\\nlp\\load.py\", line 548, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"C:\\Users\\seto\\AppData\\Local\\Programs\\Python\\Python38\\lib\\site-packages\\nlp\\builder.py\", line 462, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"C:\\Users\\seto\\AppData\\Local\\Programs\\Python\\Python38\\lib\\site-packages\\nlp\\builder.py\", line 969, in _download_and_prepare\r\n pipeline_results = pipeline.run()\r\n File \"C:\\Users\\seto\\AppData\\Local\\Programs\\Python\\Python38\\lib\\site-packages\\apache_beam\\pipeline.py\", line 534, in run\r\n return self.runner.run_pipeline(self, self._options)\r\n....\r\n File \"C:\\Users\\seto\\AppData\\Local\\Programs\\Python\\Python38\\lib\\site-packages\\apache_beam\\runners\\worker\\bundle_processor.py\", line 218, in process_encoded\r\n self.output(decoded_value)\r\n File \"C:\\Users\\seto\\AppData\\Local\\Programs\\Python\\Python38\\lib\\site-packages\\apache_beam\\runners\\worker\\operations.py\", line 332, in output\r\n cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)\r\n File \"C:\\Users\\seto\\AppData\\Local\\Programs\\Python\\Python38\\lib\\site-packages\\Cython\\Shadow.py\", line 167, in cast\r\n return type(*args)\r\nTypeError: Receiver() takes no arguments\r\n\r\n```\r\n\r\nThis is run on a Windows 10 machine with python 3.8. 
I get the same error loading the swedish wikipedia dump.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/508\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/508\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/507","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/507\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/507\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/507\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/507","id":679400683,"node_id":"MDU6SXNzdWU2Nzk0MDA2ODM=","number":507,"title":"Errors when I use ","user":{"login":"mchari","id":30506151,"node_id":"MDQ6VXNlcjMwNTA2MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30506151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mchari","html_url":"https:\/\/github.com\/mchari","followers_url":"https:\/\/api.github.com\/users\/mchari\/followers","following_url":"https:\/\/api.github.com\/users\/mchari\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mchari\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mchari\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mchari\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mchari\/orgs","repos_url":"https:\/\/api.github.com\/users\/mchari\/repos","events_url":"https:\/\/api.github.com\/users\/mchari\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mchari\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-14T21:03:57Z","updated_at":"2020-08-14T21:39:10Z","closed_at":"2020-08-14T21:39:10Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I tried the following example code from https:\/\/huggingface.co\/deepset\/roberta-base-squad2 and got errors \r\nI am using **transformers 3.0.2** code .\r\n\r\n\r\nfrom transformers.pipelines import pipeline\r\nfrom transformers.modeling_auto import AutoModelForQuestionAnswering\r\nfrom transformers.tokenization_auto import AutoTokenizer\r\n\r\nmodel_name = \"deepset\/roberta-base-squad2\"\r\n\r\nnlp = pipeline('question-answering', model=model_name, tokenizer=model_name)\r\nQA_input = {\r\n 'question': 'Why is model conversion important?',\r\n 'context': 'The option to convert models between FARM and transformers gives freedom to the user and let people easily switch between frameworks.'\r\n}\r\nres = nlp(QA_input)\r\n\r\nThe errors are :\r\n\r\nres = nlp(QA_input)\r\n File \".local\/lib\/python3.6\/site-packages\/transformers\/pipelines.py\", line 1316, in __call__\r\n for s, e, score in zip(starts, ends, scores)\r\n File \".local\/lib\/python3.6\/site-packages\/transformers\/pipelines.py\", line 1316, in \r\n for s, e, score in zip(starts, ends, scores)\r\nKeyError: 
0\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/507\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/507\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/506","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/506\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/506\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/506\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/506","id":679164788,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY3OTkwNjc2","number":506,"title":"fix dataset.map for function without outputs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-14T13:40:22Z","updated_at":"2020-08-17T11:24:39Z","closed_at":"2020-08-17T11:24:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/506","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/506","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/506.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/506.patch","merged_at":"2020-08-17T11:24:38Z"},"body":"As noticed in #505 , giving a function that doesn't return anything in `.map` raises an error because of an unreferenced variable.\r\nI fixed that and added tests.\r\n\r\nThanks @avloss for reporting","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/506\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/506\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/505","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/505\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/505\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/505\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/505","id":678791400,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY3NjgxMjY4","number":505,"title":"tmp_file referenced before assignment","user":{"login":"avloss","id":17853685,"node_id":"MDQ6VXNlcjE3ODUzNjg1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17853685?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/avloss","html_url":"https:\/\/github.com\/avloss","followers_url":"https:\/\/api.github.com\/users\/avloss\/followers","following_url":"https:\/\/api.github.com\/users\/avloss\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/avloss\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/avloss\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/avloss\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/avloss\/orgs","repos_url":"https:\/\/api.github.com\/users\/avloss\/repos","events_url":"https:\/\/api.github.com\/users\/avloss\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/avloss\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-13T23:27:33Z","updated_at":"2020-08-14T13:42:46Z","closed_at":"2020-08-14T13:42:46Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/505","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/505","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/505.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/505.patch","merged_at":null},"body":"Just learning about this library - so might've not set up all the flags correctly, but was getting this error about \"tmp_file\".","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/505\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/505\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/504","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/504\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/504\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/504\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/504","id":678756211,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY3NjUxOTA5","number":504,"title":"Added downloading to Hyperpartisan news 
detection","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-13T21:53:46Z","updated_at":"2020-08-27T08:18:41Z","closed_at":"2020-08-27T08:18:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/504","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/504","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/504.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/504.patch","merged_at":"2020-08-27T08:18:41Z"},"body":"Following the discussion on Slack and #349, I've updated the hyperpartisan dataset to pull directly from Zenodo rather than manual install, which should make this dataset much more accessible. Many thanks to @johanneskiesel !\r\n\r\nCurrently doesn't pass `test_load_real_dataset` - I'm using `self.config.name` which is `default` in this test. Might be related to #474","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/504\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/504\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/503","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/503\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/503\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/503\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/503","id":678726538,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY3NjI3MTEw","number":503,"title":"CompGuessWhat?! 
0.2.0","user":{"login":"aleSuglia","id":1479733,"node_id":"MDQ6VXNlcjE0Nzk3MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1479733?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aleSuglia","html_url":"https:\/\/github.com\/aleSuglia","followers_url":"https:\/\/api.github.com\/users\/aleSuglia\/followers","following_url":"https:\/\/api.github.com\/users\/aleSuglia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aleSuglia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aleSuglia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aleSuglia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aleSuglia\/orgs","repos_url":"https:\/\/api.github.com\/users\/aleSuglia\/repos","events_url":"https:\/\/api.github.com\/users\/aleSuglia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aleSuglia\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":20,"created_at":"2020-08-13T20:51:26Z","updated_at":"2020-10-21T06:54:29Z","closed_at":"2020-10-21T06:54:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/503","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/503","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/503.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/503.patch","merged_at":null},"body":"We updated some metadata information associated with the dataset. In addition, we've updated the `create_dummy_data.py` script to generate data samples for the dataset. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/503\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/503\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/502","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/502\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/502\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/502\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/502","id":678546070,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY3NDc1MDg0","number":502,"title":"Fix tokenizers caching","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-13T15:53:37Z","updated_at":"2020-08-19T13:37:19Z","closed_at":"2020-08-19T13:37:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/502","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/502","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/502.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/502.patch","merged_at":"2020-08-19T13:37:17Z"},"body":"I've found some cases where the caching didn't work properly for tokenizers:\r\n\r\n1. if a tokenizer has a regex pattern, then the caching would be inconsistent across sessions\r\n2. if a tokenizer has a cache attribute that changes after some calls, the the caching would not work after cache updates\r\n3. if a tokenizer is used inside a function, the caching of this function would result in the same cache file for different tokenizers\r\n4. if `unique_no_split_tokens`'s attribute is not the same across sessions (after loading a tokenizer) then the caching could be inconsistent\r\n\r\nTo fix that, this is what I did:\r\n\r\n1. register a specific `save_regex` function for pickle that makes regex dumps deterministic\r\n2. ignore cache attribute of some tokenizers before dumping\r\n3. enable recursive dump by default for all dumps\r\n4. 
make `unique_no_split_tokens` deterministic in https:\/\/github.com\/huggingface\/transformers\/pull\/6461\r\n\r\nI also added tests to make sure that tokenizers hashing works as expected.\r\nIn the future we should find a way to test if hashing also works across session (maybe using two CI jobs ? or by hardcoding a tokenizer's hash ?)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/502\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/502\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/501","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/501\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/501\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/501\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/501","id":677952893,"node_id":"MDU6SXNzdWU2Nzc5NTI4OTM=","number":501,"title":"Caching doesn't work for map (non-deterministic)","user":{"login":"wulu473","id":8149933,"node_id":"MDQ6VXNlcjgxNDk5MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8149933?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/wulu473","html_url":"https:\/\/github.com\/wulu473","followers_url":"https:\/\/api.github.com\/users\/wulu473\/followers","following_url":"https:\/\/api.github.com\/users\/wulu473\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/wulu473\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/wulu473\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/wulu473\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/wulu473\/orgs","repos_url":"https:\/\/api.github.com\/users\/wulu473\/repos","events_url":"https:\/\/api.github.com\/users\/wulu473\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/wulu473\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/ap
i.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2020-08-12T20:20:07Z","updated_at":"2020-08-24T16:35:00Z","closed_at":"2020-08-24T16:34:35Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The caching functionality doesn't work reliably when tokenizing a dataset. Here's a small example to reproduce it. \r\n\r\n```python\r\nimport nlp\r\nimport transformers\r\n\r\ndef main():\r\n ds = nlp.load_dataset(\"reddit\", split=\"train[:500]\")\r\n\r\n tokenizer = transformers.AutoTokenizer.from_pretrained(\"gpt2\")\r\n\r\n def convert_to_features(example_batch):\r\n input_str = example_batch[\"body\"]\r\n encodings = tokenizer(input_str, add_special_tokens=True, truncation=True)\r\n return encodings\r\n\r\n ds = ds.map(convert_to_features, batched=True)\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n```\r\n\r\nRoughly 3\/10 times, this example recomputes the tokenization.\r\n\r\nIs this expected behaviour?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/501\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/501\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/500","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/500\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/500\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/500\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/500","id":677841708,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY2ODk0NTk0","number":500,"title":"Use hnsw in 
wiki_dpr","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-12T16:58:07Z","updated_at":"2020-08-20T07:59:19Z","closed_at":"2020-08-20T07:59:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/500","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/500","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/500.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/500.patch","merged_at":"2020-08-20T07:59:18Z"},"body":"The HNSW faiss index is much faster that regular Flat index.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/500\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/500\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/499","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/499\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/499\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/499\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/499","id":677709938,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY2Nzg1MjAy","number":499,"title":"Narrativeqa (with full 
text)","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2020-08-12T13:49:43Z","updated_at":"2020-12-09T11:21:02Z","closed_at":"2020-12-09T11:21:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/499","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/499","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/499.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/499.patch","merged_at":null},"body":"Following the uploading of the full text data in #309, I've added the full text to the narrativeqa dataset.\r\n\r\nFew notes:\r\n- Had some encoding issues using the default `open` so am using `open(encoding=\"latin-1\"...` which seems to fix it. Looks fine.\r\n- Can't get the dummy data to work. 
Currently putting stuff at: \r\n ```\r\n dummy\r\n |---- 0.0.0\r\n |- dummy_data.zip\r\n |-master.zip\r\n | |- narrativeqa-master\r\n | |- documents.csv\r\n | |- qaps.csv\r\n | |- third_party ......\r\n | \r\n | - narrativeqa_full_text.zip\r\n | | - 001.content\r\n | | - ....\r\n ```\r\n Not sure what I'm messing up here (probably something obvious).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/499\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/499\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/498","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/498\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/498\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/498\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/498","id":677597479,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY2Njg5NTcy","number":498,"title":"dont use beam fs to save info for local cache dir","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-12T11:00:00Z","updated_at":"2020-08-14T13:17:21Z","closed_at":"2020-08-14T13:17:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/498","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/498","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/498.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/498.patch","merged_at":"2020-08-14T13:17:20Z"},"body":"If the cache dir is local, then we shouldn't use beam's filesystem to save the dataset info\r\n\r\nFix #490 \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/498\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/498\/timeline","performed_via_github_app":null} 
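The idea behind PR 498 above can be sketched as follows: write the dataset info with plain local I/O when the cache directory is a local path, and only go through Beam's filesystem layer for remote URIs. This is an illustration of the principle, not the library's actual code; the `save_info` helper below is hypothetical.

```python
# Hypothetical helper illustrating the idea of PR 498, not the library's implementation.
import json

def save_info(cache_dir: str, info: dict) -> None:
    target = f"{cache_dir}/dataset_info.json"
    if "://" in cache_dir:
        # Remote cache dir (e.g. gs://, hdfs://): go through Beam's filesystem layer.
        from apache_beam.io.filesystems import FileSystems
        with FileSystems.create(target) as f:
            f.write(json.dumps(info).encode("utf-8"))
    else:
        # Local cache dir: plain local I/O is enough.
        with open(target, "w", encoding="utf-8") as f:
            json.dump(info, f)

save_info("/tmp", {"description": "demo"})
```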
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/497","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/497\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/497\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/497\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/497","id":677057116,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY2MjQ2NDQ3","number":497,"title":"skip header in PAWS-X","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-11T17:26:25Z","updated_at":"2020-08-19T09:50:02Z","closed_at":"2020-08-19T09:50:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/497","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/497","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/497.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/497.patch","merged_at":"2020-08-19T09:50:01Z"},"body":"This should fix #485 \r\n\r\nI also updated the `dataset_infos.json` file that is used to verify the integrity of the generated splits (the number of examples was reduced by one).\r\n\r\nNote that there are new fields in `dataset_infos.json` introduced in the latest release 0.4.0 corresponding to post processing info. 
I removed them in this case when I ran `nlp-cli .\/datasets\/xtreme --save_infos` to keep backward compatibility (versions 0.3.0 can't load these fields).\r\n\r\nI think I'll change the logic so that `nlp-cli test` doesn't create these fields for dataset with no post processing","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/497\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/497\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/496","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/496\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/496\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/496\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/496","id":677016998,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY2MjE1Mjg1","number":496,"title":"fix bad type in overflow check","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-11T16:24:58Z","updated_at":"2020-08-14T13:29:35Z","closed_at":"2020-08-14T13:29:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/496","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/496","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/496.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/496.patch","merged_at":"2020-08-14T13:29:34Z"},"body":"When writing an arrow file and inferring the features, the overflow check could fail if the first example had a `null` field.\r\nThis is because we were not using the inferred features to do this check, and we could end up with arrays that don't match because of a type mismatch (`null` vs `string` for example).\r\n\r\nThis should fix #482","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/496\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/496\/timeline","performed_via_github_app":null} 
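To see the kind of type mismatch that PR 496 above guards against, the sketch below shows how PyArrow infers a `null` type when the first examples are all missing, and how casting to the intended type reconciles the arrays. It is illustrative only.

```python
# Illustrative sketch of the null-vs-string mismatch behind PR 496.
import pyarrow as pa

first_batch = pa.array([None, None])   # inferred type: null
second_batch = pa.array(["a", "b"])    # inferred type: string
print(first_batch.type, second_batch.type)

# Casting with the intended (inferred) feature type makes the arrays consistent.
fixed = first_batch.cast(pa.string())
print(fixed.type)  # string
```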
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/495","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/495\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/495\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/495\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/495","id":676959289,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY2MTY5MTA3","number":495,"title":"stack vectors in pytorch and tensorflow","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-11T15:12:53Z","updated_at":"2020-08-12T09:30:49Z","closed_at":"2020-08-12T09:30:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/495","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/495","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/495.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/495.patch","merged_at":"2020-08-12T09:30:48Z"},"body":"When the format of a dataset is set to pytorch or tensorflow, and if the dataset has vectors in it, they were not stacked together as tensors when calling `dataset[i:i + batch_size][column]` or `dataset[column]`.\r\n\r\nI added support for stacked tensors for both pytorch and tensorflow.\r\nFor ragged tensors, they are stacked only for tensorflow as pytorch doesn't support ragged tensors.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/495\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/495\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/494","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/494\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/494\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/494\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/494","id":676886955,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY2MTExOTQz","number":494,"title":"Fix numpy 
stacking","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-11T13:40:30Z","updated_at":"2020-08-11T14:56:50Z","closed_at":"2020-08-11T13:49:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/494","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/494","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/494.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/494.patch","merged_at":"2020-08-11T13:49:52Z"},"body":"When getting items using a column name as a key, numpy arrays were not stacked.\r\nI fixed that and added some tests.\r\n\r\nThere is another issue that still needs to be fixed though: when getting items using a column name as a key, pytorch tensors are not stacked (it outputs a list of tensors). 
This PR should help to fix this issue.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/494\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/494\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/493","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/493\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/493\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/493\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/493","id":676527351,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY1ODIxOTA0","number":493,"title":"Fix wmt zh-en url","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-11T02:14:52Z","updated_at":"2020-08-11T02:22:28Z","closed_at":"2020-08-11T02:22:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/493","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/493","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/493.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/493.patch","merged_at":null},"body":"I verified that\r\n```\r\nwget https:\/\/stuncorpusprod.blob.core.windows.net\/corpusfiles\/UNv1.0.en-zh.tar.gz.00\r\n```\r\nruns in 2 minutes.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/493\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/493\/timeline","performed_via_github_app":null} 
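For the numpy-stacking fix described in the PR 494 entry above, here is a minimal sketch, assuming today's `datasets` API: with the numpy format set, reading a whole column of equal-length vectors should yield one stacked array rather than a Python list of per-row arrays.

```python
# Minimal sketch, assuming the current `datasets` package.
from datasets import Dataset

ds = Dataset.from_dict({"vec": [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]})
ds = ds.with_format("numpy")

col = ds["vec"]
# Expected: <class 'numpy.ndarray'> with shape (3, 2) for equal-length vectors.
print(type(col), getattr(col, "shape", None))
```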
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/492","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/492\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/492\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/492\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/492","id":676495064,"node_id":"MDU6SXNzdWU2NzY0OTUwNjQ=","number":492,"title":"nlp.Features does not distinguish between nullable and non-nullable types in PyArrow schema","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-08-11T00:27:46Z","updated_at":"2020-08-26T16:17:19Z","closed_at":"2020-08-26T16:17:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Here's the code I'm trying to run:\r\n\r\n```python\r\ndset_wikipedia = nlp.load_dataset(\"wikipedia\", \"20200501.en\", split=\"train\", cache_dir=args.cache_dir)\r\ndset_wikipedia.drop(columns=[\"title\"])\r\ndset_wikipedia.features.pop(\"title\")\r\ndset_books = nlp.load_dataset(\"bookcorpus\", split=\"train\", cache_dir=args.cache_dir)\r\ndset = nlp.concatenate_datasets([dset_wikipedia, dset_books])\r\n```\r\n\r\nThis fails because they have different schemas, despite having identical features.\r\n\r\n```python\r\nassert dset_wikipedia.features == dset_books.features # True\r\nassert dset_wikipedia._data.schema == dset_books._data.schema # False\r\n```\r\n\r\nThe Wikipedia dataset has 'text: string', while the BookCorpus dataset has 'text: string not null'. 
Currently I hack together a working schema match with the following line, but it would be better if this was handled in Features themselves.\r\n\r\n```python\r\ndset_wikipedia._data = dset_wikipedia.data.cast(dset_books._data.schema)\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/492\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/492\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/491","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/491\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/491\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/491\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/491","id":676486275,"node_id":"MDU6SXNzdWU2NzY0ODYyNzU=","number":491,"title":"No 0.4.0 release on GitHub","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-10T23:59:57Z","updated_at":"2020-08-11T16:50:07Z","closed_at":"2020-08-11T16:50:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"0.4.0 was released on PyPi, but not on GitHub. 
This means [the documentation](https:\/\/huggingface.co\/nlp\/) is still displaying from 0.3.0, and that there's no tag to easily clone the 0.4.0 version of the repo.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/491\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/491\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/490","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/490\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/490\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/490\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/490","id":676482242,"node_id":"MDU6SXNzdWU2NzY0ODIyNDI=","number":490,"title":"Loading preprocessed Wikipedia dataset requires apache_beam","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-10T23:46:50Z","updated_at":"2020-08-14T13:17:20Z","closed_at":"2020-08-14T13:17:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Running \r\n\r\n`nlp.load_dataset(\"wikipedia\", \"20200501.en\", split=\"train\", dir=\"\/tmp\/wikipedia\")`\r\n\r\ngives an error if apache_beam is not installed, stemming from\r\n\r\nhttps:\/\/github.com\/huggingface\/nlp\/blob\/38eb2413de54ee804b0be81781bd65ac4a748ced\/src\/nlp\/builder.py#L981-L988\r\n\r\nThis succeeded without the dependency in version 0.3.0. This seems like an unnecessary dependency to process some dataset info if you're using the already-preprocessed version. 
Could it be removed?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/490\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/490\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/489","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/489\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/489\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/489\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/489","id":676456257,"node_id":"MDU6SXNzdWU2NzY0NTYyNTc=","number":489,"title":"ug","user":{"login":"timothyjlaurent","id":2000204,"node_id":"MDQ6VXNlcjIwMDAyMDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2000204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timothyjlaurent","html_url":"https:\/\/github.com\/timothyjlaurent","followers_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/followers","following_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/orgs","repos_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/repos","events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-10T22:33:03Z","updated_at":"2020-08-10T22:55:14Z","closed_at":"2020-08-10T22:33:40Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/489\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/489\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/488","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/488\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/488\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/488\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/488","id":676299993,"node_id":"MDU6SXNzdWU2NzYyOTk5OTM=","number":488,"title":"issues with downloading datasets for wmt16 and 
wmt19","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-10T17:32:51Z","updated_at":"2020-08-11T05:07:34Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I have encountered multiple issues while trying to:\r\n```\r\nimport nlp\r\ndataset = nlp.load_dataset('wmt16', 'ru-en')\r\nmetric = nlp.load_metric('wmt16')\r\n```\r\n1. I had to do `pip install -e \".[dev]\" ` on master, currently released nlp didn't work (sorry, didn't save the error) - I went back to the released version and now it worked. So it must have been some outdated dependencies that `pip install -e \".[dev]\" ` fixed.\r\n\r\n2. it was downloading at 60kbs - almost 5 hours to get the dataset. It was downloading all pairs and not just the one I asked for. \r\n\r\nI tried the same code with `wmt19` in parallel and it took a few secs to download and it only fetched data for the requested pair. (but it failed too, see below)\r\n\r\n3. my machine has crushed and when I retried I got:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \".\/download.py\", line 9, in \r\n dataset = nlp.load_dataset('wmt16', 'ru-en')\r\n File \"\/mnt\/nvme1\/code\/huggingface\/nlp-master\/src\/nlp\/load.py\", line 549, in load_dataset\r\n download_config=download_config, download_mode=download_mode, ignore_verifications=ignore_verifications,\r\n File \"\/mnt\/nvme1\/code\/huggingface\/nlp-master\/src\/nlp\/builder.py\", line 449, in download_and_prepare\r\n with incomplete_dir(self._cache_dir) as tmp_data_dir:\r\n File \"\/home\/stas\/anaconda3\/envs\/main\/lib\/python3.7\/contextlib.py\", line 112, in __enter__\r\n return next(self.gen)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/nlp-master\/src\/nlp\/builder.py\", line 422, in incomplete_dir\r\n os.makedirs(tmp_dir)\r\n File \"\/home\/stas\/anaconda3\/envs\/main\/lib\/python3.7\/os.py\", line 221, in makedirs\r\n mkdir(name, mode)\r\nFileExistsError: [Errno 17] File exists: '\/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ru-en\/1.0.0\/4d8269cdd971ed26984a9c0e4a158e0c7afc8135fac8fb8ee43ceecf38fd422d.incomplete'\r\n```\r\nit can't handle resumes. but neither allows a new start. Had to delete it manually.\r\n\r\n4. 
and finally when it downloaded the dataset, it then failed to fetch the metrics:\r\n```\r\nTraceback (most recent call last):\r\n File \".\/download.py\", line 15, in \r\n metric = nlp.load_metric('wmt16')\r\n File \"\/mnt\/nvme1\/code\/huggingface\/nlp-master\/src\/nlp\/load.py\", line 442, in load_metric\r\n module_path, hash = prepare_module(path, download_config=download_config, dataset=False)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/nlp-master\/src\/nlp\/load.py\", line 258, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/nlp-master\/src\/nlp\/utils\/file_utils.py\", line 198, in cached_path\r\n local_files_only=download_config.local_files_only,\r\n File \"\/mnt\/nvme1\/code\/huggingface\/nlp-master\/src\/nlp\/utils\/file_utils.py\", line 356, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/nlp\/metrics\/wmt16\/wmt16.py\r\n```\r\n\r\n5. If I run the same code with `wmt19`, it fails too:\r\n\r\n```\r\nConnectionError: Couldn't reach https:\/\/storage.googleapis.com\/tfdataset-data\/downloadataset\/uncorpus\/UNv1.0.en-ru.tar.gz\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/488\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/488\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/487","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/487\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/487\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/487\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/487","id":676143029,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY1NTA1NjQy","number":487,"title":"Fix elasticsearch result ids returning as 
strings","user":{"login":"sai-prasanna","id":3595526,"node_id":"MDQ6VXNlcjM1OTU1MjY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3595526?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sai-prasanna","html_url":"https:\/\/github.com\/sai-prasanna","followers_url":"https:\/\/api.github.com\/users\/sai-prasanna\/followers","following_url":"https:\/\/api.github.com\/users\/sai-prasanna\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sai-prasanna\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sai-prasanna\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sai-prasanna\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sai-prasanna\/orgs","repos_url":"https:\/\/api.github.com\/users\/sai-prasanna\/repos","events_url":"https:\/\/api.github.com\/users\/sai-prasanna\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sai-prasanna\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-10T13:37:11Z","updated_at":"2020-08-31T10:42:46Z","closed_at":"2020-08-31T10:42:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/487","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/487","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/487.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/487.patch","merged_at":"2020-08-31T10:42:46Z"},"body":"I am using the latest elasticsearch binary and master of nlp. For me elasticsearch searches failed because the resultant \"id_\" returned for searches are strings, but our library assumes them to be integers.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/487\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/487\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/486","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/486\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/486\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/486\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/486","id":675649034,"node_id":"MDU6SXNzdWU2NzU2NDkwMzQ=","number":486,"title":"Bookcorpus data contains pretokenized 
text","user":{"login":"orsharir","id":99543,"node_id":"MDQ6VXNlcjk5NTQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/99543?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/orsharir","html_url":"https:\/\/github.com\/orsharir","followers_url":"https:\/\/api.github.com\/users\/orsharir\/followers","following_url":"https:\/\/api.github.com\/users\/orsharir\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/orsharir\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/orsharir\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/orsharir\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/orsharir\/orgs","repos_url":"https:\/\/api.github.com\/users\/orsharir\/repos","events_url":"https:\/\/api.github.com\/users\/orsharir\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/orsharir\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-08-09T06:53:24Z","updated_at":"2020-09-30T12:01:04Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It seem that the bookcoprus data downloaded through the library was pretokenized with NLTK's Treebank tokenizer, which changes the text in incompatible ways to how, for instance, BERT's wordpiece tokenizer works. For example, \"didn't\" becomes \"did\" + \"n't\", and double quotes are changed to `` and '' for start and end quotes, respectively.\r\n\r\nOn my own projects, I just run the data through NLTK's TreebankWordDetokenizer to reverse the tokenization (as best as possible). I think it would be beneficial to apply this transformation directly on your remote cached copy of the dataset. 
If you choose to do so, I would also suggest to use my fork of NLTK that fixes several bugs in their detokenizer (I've opened a pull-request, but they've yet to respond): https:\/\/github.com\/nltk\/nltk\/pull\/2575","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/486\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/486\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/485","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/485\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/485\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/485\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/485","id":675595393,"node_id":"MDU6SXNzdWU2NzU1OTUzOTM=","number":485,"title":"PAWS dataset first item is header","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-08T22:05:25Z","updated_at":"2020-08-19T09:50:01Z","closed_at":"2020-08-19T09:50:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\nimport nlp\r\ndataset = nlp.load_dataset('xtreme', 'PAWS-X.en')\r\ndataset['test'][0]\r\n```\r\n\r\nprints the following\r\n\r\n```\r\n{'label': 'label', 'sentence1': 'sentence1', 'sentence2': 'sentence2'}\r\n```\r\n\r\ndataset['test'][0] should probably be the first item in the dataset, not just a dictionary mapping the column names to themselves. 
Probably just need to ignore the first row in the dataset by default or something like that.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/485\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/485\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/484","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/484\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/484\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/484\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/484","id":675088983,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY0NjY1NTU4","number":484,"title":"update mirror for RT dataset","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-08-07T15:25:45Z","updated_at":"2020-08-24T13:33:37Z","closed_at":"2020-08-24T13:33:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/484","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/484","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/484.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/484.patch","merged_at":"2020-08-24T13:33:37Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/484\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/484\/timeline","performed_via_github_app":null} 
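As an aside on #485 above (the first item of PAWS-X being the header row), here is a minimal sketch of the suggested fix of skipping the first row while generating examples; the file path, delimiter, and column layout are assumptions for illustration, not the actual dataset script's code.

```python
import csv

def generate_examples(tsv_path):
    """Yield (index, example) pairs from a tab-separated file, skipping the header row."""
    with open(tsv_path, encoding="utf-8") as f:
        reader = csv.reader(f, delimiter="\t")
        header = next(reader)  # e.g. ["sentence1", "sentence2", "label"]; not yielded as data
        for idx, row in enumerate(reader):
            yield idx, dict(zip(header, row))
```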
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/483","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/483\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/483\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/483\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/483","id":675080694,"node_id":"MDU6SXNzdWU2NzUwODA2OTQ=","number":483,"title":"rotten tomatoes movie review dataset taken down","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-08-07T15:12:01Z","updated_at":"2020-09-08T09:36:34Z","closed_at":"2020-09-08T09:36:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In an interesting twist of events, the individual who created the movie review seems to have left Cornell, and their webpage has been removed, along with the movie review dataset (http:\/\/www.cs.cornell.edu\/people\/pabo\/movie-review-data\/rt-polaritydata.tar.gz). 
It's not downloadable anymore.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/483\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/483\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/482","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/482\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/482\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/482\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/482","id":674851147,"node_id":"MDU6SXNzdWU2NzQ4NTExNDc=","number":482,"title":"Bugs : dataset.map() is frozen on ELI5","user":{"login":"ratthachat","id":56621342,"node_id":"MDQ6VXNlcjU2NjIxMzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56621342?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ratthachat","html_url":"https:\/\/github.com\/ratthachat","followers_url":"https:\/\/api.github.com\/users\/ratthachat\/followers","following_url":"https:\/\/api.github.com\/users\/ratthachat\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ratthachat\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ratthachat\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ratthachat\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ratthachat\/orgs","repos_url":"https:\/\/api.github.com\/users\/ratthachat\/repos","events_url":"https:\/\/api.github.com\/users\/ratthachat\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ratthachat\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}
","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2020-08-07T08:23:35Z","updated_at":"2020-08-12T14:13:46Z","closed_at":"2020-08-11T23:55:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi Huggingface Team!\r\n\r\nThank you guys once again for this amazing repo.\r\n\r\nI have tried to prepare ELI5 to train with T5, based on [this wonderful notebook of Suraj Patil](https:\/\/github.com\/patil-suraj\/exploring-T5\/blob\/master\/T5_on_TPU.ipynb) \r\n\r\nHowever, when I run `dataset.map()` on ELI5 to prepare `input_text, target_text`, `dataset.map` is **frozen** in the first hundreds examples. On the contrary, this works totally fine on SQUAD (80,000 examples). Both `nlp` version 0.3.0 and 0.4.0 cause frozen process . Also try various `pyarrow` versions from 0.16.0 \/ 0.17.0 \/ 1.0.0 also have the same frozen process.\r\n\r\nReproducible code can be found on [this colab notebook ](https:\/\/colab.research.google.com\/drive\/14wttOTv3ky74B_c0kv5WrbgQjCF2fYQk?usp=sharing), where I also show that the same mapping function works fine on SQUAD, so the problem is likely due to ELI5 somehow.\r\n\r\n----------------------------------------\r\n**More Info :** instead of `map`, if I run `for` loop and apply function by myself, there's no error and can finish within 10 seconds. However, `nlp dataset` is immutable (I couldn't manually assign a new key-value to `dataset `object)\r\n\r\nI also notice that SQUAD texts are quite clean while ELI5 texts contain many special characters, not sure if this is the cause ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/482\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/482\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/481","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/481\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/481\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/481\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/481","id":674567389,"node_id":"MDExOlB1bGxSZXF1ZXN0NDY0MjM2MTA1","number":481,"title":"Apply utf-8 encoding to all 
datasets","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-08-06T20:02:09Z","updated_at":"2020-08-20T08:16:08Z","closed_at":"2020-08-20T08:16:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/481","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/481","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/481.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/481.patch","merged_at":"2020-08-20T08:16:08Z"},"body":"## Description\r\nThis PR applies utf-8 encoding for all instances of `with open(...) as f` to all Python files in `datasets\/`. As suggested by @thomwolf in #468 , we use regular expressions and the following function\r\n\r\n```python\r\ndef apply_encoding_on_file_open(filepath: str):\r\n \"\"\"Apply UTF-8 encoding for all instances where a non-binary file is opened.\"\"\"\r\n \r\n with open(filepath, 'r', encoding='utf-8') as input_file:\r\n regexp = re.compile(r\"(?!.*\\b(?:encoding|rb|w|wb|w+|wb+|ab|ab+)\\b)(?<=\\s)(open)\\((.*)\\)\")\r\n input_text = input_file.read()\r\n match = regexp.search(input_text)\r\n \r\n if match:\r\n output = regexp.sub(lambda m: m.group()[:-1]+', encoding=\"utf-8\")', input_text)\r\n with open(filepath, 'w', encoding='utf-8') as output_file:\r\n output_file.write(output)\r\n```\r\n\r\nto perform the replacement. \r\n\r\nNote:\r\n\r\n1. I excluded all _**binary files**_ from the search since it's possible some objects are opened for which the encoding doesn't make sense. Please correct me if I'm wrong and I'll tweak the regexp accordingly\r\n2. There were two edge cases where the regexp failed (e.g. two `open` instances on a single line), but I decided to just fix these manually in the interest of time.\r\n3. I only applied the replacement to files in `datasets\/`. Let me know if this should be extended to other places like `metrics\/`\r\n4. 
I have implemented a unit test that should catch missing encodings in future CI runs\r\n\r\nCloses #468 and possibly #347 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/481\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/481\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/480","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/480\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/480\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/480\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/480","id":674245959,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYzOTcwNjQ2","number":480,"title":"Column indexing hotfix","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-06T11:37:05Z","updated_at":"2020-08-12T08:36:10Z","closed_at":"2020-08-12T08:36:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/480","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/480","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/480.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/480.patch","merged_at":null},"body":"As observed for example in #469 , currently `__getitem__` does not convert the data to the dataset format when indexing by column. This is a hotfix that imitates functional 0.3.0. code. 
In the future it'd probably be nice to have a test there.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/480\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/480\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/479","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/479\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/479\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/479\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/479","id":673905407,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYzNjkxMjA0","number":479,"title":"add METEOR metric","user":{"login":"vegarab","id":24683907,"node_id":"MDQ6VXNlcjI0NjgzOTA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24683907?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vegarab","html_url":"https:\/\/github.com\/vegarab","followers_url":"https:\/\/api.github.com\/users\/vegarab\/followers","following_url":"https:\/\/api.github.com\/users\/vegarab\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vegarab\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vegarab\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vegarab\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vegarab\/orgs","repos_url":"https:\/\/api.github.com\/users\/vegarab\/repos","events_url":"https:\/\/api.github.com\/users\/vegarab\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vegarab\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-08-05T23:13:00Z","updated_at":"2020-08-19T13:39:09Z","closed_at":"2020-08-19T13:39:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/479","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/479","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/479.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/479.patch","merged_at":"2020-08-19T13:39:09Z"},"body":"Added the METEOR metric. 
Can be used like this:\r\n\r\n```python\r\nimport nlp\r\nmeteor = nlp.load_metric('metrics\/meteor')\r\nmeteor.compute([\"some string\", \"some string\"], [\"some string\", \"some similar string\"])\r\n# {'meteor': 0.6411637931034483}\r\nmeteor.add(\"some string\", \"some string\")\r\nmeteor.add(\"some string\", \"some similar string\")\r\nmeteor.compute()\r\n# {'meteor': 0.6411637931034483}\r\n```\r\n\r\nUses [NLTK's implementation](https:\/\/www.nltk.org\/api\/nltk.translate.html#module-nltk.translate.meteor_score), [(source)](https:\/\/github.com\/nltk\/nltk\/blob\/develop\/nltk\/translate\/meteor_score.py)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/479\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/479\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/478","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/478\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/478\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/478\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/478","id":673178317,"node_id":"MDU6SXNzdWU2NzMxNzgzMTc=","number":478,"title":"Export TFRecord to GCP bucket","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-05T01:08:32Z","updated_at":"2020-08-05T01:21:37Z","closed_at":"2020-08-05T01:21:36Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Previously, I was writing TFRecords manually to GCP bucket with: `with tf.io.TFRecordWriter('gs:\/\/my_bucket\/x.tfrecord')`\r\n\r\nSince `0.4.0` is out with the `export()` function, I tried it. But it seems TFRecords cannot be directly written to GCP bucket.\r\n\r\n`dataset.export('local.tfrecord')` works fine, \r\nbut `dataset.export('gs:\/\/my_bucket\/x.tfrecord')` does not work. \r\n\r\nThere is no error message, I just can't find the file on my bucket...\r\n\r\n---\r\n\r\nLooking at the code, `nlp` is using `tf.data.experimental.TFRecordWriter`, while I was using `tf.io.TFRecordWriter`. \r\n\r\n**What's the difference between those 2 ? 
How can I write TFRecords files directly to GCP bucket ?**\r\n\r\n@jarednielsen @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/478\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/478\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/477","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/477\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/477\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/477\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/477","id":673142143,"node_id":"MDU6SXNzdWU2NzMxNDIxNDM=","number":477,"title":"Overview.ipynb throws exceptions with nlp 0.4.0","user":{"login":"mandy-li","id":23109219,"node_id":"MDQ6VXNlcjIzMTA5MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23109219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mandy-li","html_url":"https:\/\/github.com\/mandy-li","followers_url":"https:\/\/api.github.com\/users\/mandy-li\/followers","following_url":"https:\/\/api.github.com\/users\/mandy-li\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mandy-li\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mandy-li\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mandy-li\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mandy-li\/orgs","repos_url":"https:\/\/api.github.com\/users\/mandy-li\/repos","events_url":"https:\/\/api.github.com\/users\/mandy-li\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mandy-li\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-08-04T23:18:15Z","updated_at":"2021-08-03T06:02:15Z","closed_at":"2021-08-03T06:02:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"with nlp 0.4.0, the TensorFlow example in Overview.ipynb throws the following exceptions:\r\n\r\n\r\n---------------------------------------------------------------------------\r\nAttributeError Traceback (most recent call last)\r\n in \r\n----> 1 features = {x: train_tf_dataset[x].to_tensor(default_value=0, shape=[None, tokenizer.max_len]) for x in columns[:3]}\r\n 2 labels = {\"output_1\": train_tf_dataset[\"start_positions\"].to_tensor(default_value=0, shape=[None, 1])}\r\n 3 labels[\"output_2\"] = train_tf_dataset[\"end_positions\"].to_tensor(default_value=0, shape=[None, 1])\r\n 4 tfdataset = tf.data.Dataset.from_tensor_slices((features, labels)).batch(8)\r\n\r\n in (.0)\r\n----> 1 features = {x: train_tf_dataset[x].to_tensor(default_value=0, shape=[None, tokenizer.max_len]) for x in columns[:3]}\r\n 2 labels = {\"output_1\": train_tf_dataset[\"start_positions\"].to_tensor(default_value=0, shape=[None, 1])}\r\n 3 labels[\"output_2\"] = train_tf_dataset[\"end_positions\"].to_tensor(default_value=0, shape=[None, 1])\r\n 4 tfdataset = tf.data.Dataset.from_tensor_slices((features, labels)).batch(8)\r\n\r\nAttributeError: 'numpy.ndarray' object has no attribute 
'to_tensor'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/477\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/477\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/476","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/476\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/476\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/476\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/476","id":672991854,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYyOTMyMTgx","number":476,"title":"CheckList","user":{"login":"marcotcr","id":698010,"node_id":"MDQ6VXNlcjY5ODAxMA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/698010?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/marcotcr","html_url":"https:\/\/github.com\/marcotcr","followers_url":"https:\/\/api.github.com\/users\/marcotcr\/followers","following_url":"https:\/\/api.github.com\/users\/marcotcr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/marcotcr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/marcotcr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/marcotcr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/marcotcr\/orgs","repos_url":"https:\/\/api.github.com\/users\/marcotcr\/repos","events_url":"https:\/\/api.github.com\/users\/marcotcr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/marcotcr\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-04T18:32:05Z","updated_at":"2020-09-03T19:34:08Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/476","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/476","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/476.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/476.patch","merged_at":null},"body":"Sorry for the large pull request.\r\n- Added checklists as datasets. 
I can't run `test_load_real_dataset` (see #474), but I can load the datasets successfully as shown in the example notebook\r\n- Added a checklist wrapper ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/476\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/476\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/475","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/475\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/475\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/475\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/475","id":672884595,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYyODQzMzQz","number":475,"title":"misc. bugs and quality of life","user":{"login":"joeddav","id":9353833,"node_id":"MDQ6VXNlcjkzNTM4MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9353833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joeddav","html_url":"https:\/\/github.com\/joeddav","followers_url":"https:\/\/api.github.com\/users\/joeddav\/followers","following_url":"https:\/\/api.github.com\/users\/joeddav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joeddav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joeddav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joeddav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joeddav\/orgs","repos_url":"https:\/\/api.github.com\/users\/joeddav\/repos","events_url":"https:\/\/api.github.com\/users\/joeddav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joeddav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-04T15:32:29Z","updated_at":"2020-08-17T21:14:08Z","closed_at":"2020-08-17T21:14:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/475","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/475","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/475.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/475.patch","merged_at":"2020-08-17T21:14:07Z"},"body":"A few misc. bugs and QOL improvements that I've come across in using the library. Let me know if you don't like any of them and I can adjust\/remove them.\r\n\r\n1. Printing datasets without a description field throws an error when formatting the `single_line_description`. This fixes that, and also adds some formatting to the repr to make it slightly more readable.\r\n```\r\n>>> print(list_datasets()[0])\r\nnlp.ObjectInfo(\r\n\tid='aeslc',\r\n\tdescription='A collection of email messages of employees in the Enron Corporation.There are two features: - email_body: email body text. 
- subject_line: email subject text.',\r\n\tfiles=[nlp.S3Object('aeslc.py'), nlp.S3Object('dataset_infos.json'), nlp.S3Object('dummy\/1.0.0\/dummy_data-zip-extracted\/dummy_data\/AESLC-master\/enron_subject_line\/dev\/allen-p_inbox_29.subject'), nlp.S3Object('dummy\/1.0.0\/dummy_data-zip-extracted\/dummy_data\/AESLC-master\/enron_subject_line\/test\/allen-p_inbox_24.subject'), nlp.S3Object('dummy\/1.0.0\/dummy_data-zip-extracted\/dummy_data\/AESLC-master\/enron_subject_line\/train\/allen-p_inbox_20.subject'), nlp.S3Object('dummy\/1.0.0\/dummy_data.zip'), nlp.S3Object('urls_checksums\/checksums.txt')]\r\n)\r\n```\r\n\r\n2. Add id-only option to `list_datasets` and `list_metrics` to allow the user to easily print out just the names of the datasets & metrics. I often found myself annoyed that this took so many strokes to do.\r\n\r\n```python\r\n[dataset.id for dataset in list_datasets()] # before\r\nlist_datasets(id_only=True) # after\r\n```\r\n\r\n3. Fix null-seed randomization caching. When using `train_test_split` and `shuffle`, the computation was being cached even without a seed or generator being passed. The result was that calling `.shuffle` more than once on the same dataset didn't do anything without passing a distinct seed or generator. Likewise with `train_test_split`.\r\n\r\n4. Indexing by iterables of bool. I added support for passing an iterable of type bool to `_getitem` as a numpy\/pandas-like indexing method. Let me know if you think it's redundant with `filter` (I know it's not optimal memory-wise), but I think it's nice to have as a lightweight alternative to do simple things without having to create a copy of the entire dataset, e.g.\r\n\r\n```python\r\ndataset[dataset['label'] == 0] # numpy-like bool indexing to look at instances with labels of 0\r\n```\r\n\r\n5. Add an `input_column` argument to `map` and `filter`, which allows you to filter\/map on a particular column rather than passing the whole dict to the function. Also adds `fn_kwargs` to be passed to the function. 
I think these together make mapping much cleaner in many cases such as mono-column tokenization:\r\n\r\n```python\r\n# before\r\ndataset = dataset.map(lambda batch: tokenizer(batch[\"text\"])\r\n# after\r\ndataset = dataset.map(tokenizer, input_column=\"text\")\r\ndataset = dataset.map(tokenizer, input_column=\"text\", fn_kwargs={\"truncation\": True, \"padding\": True})\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/475\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/475\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/474","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/474\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/474\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/474\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/474","id":672407330,"node_id":"MDU6SXNzdWU2NzI0MDczMzA=","number":474,"title":"test_load_real_dataset when config has BUILDER_CONFIGS that matter","user":{"login":"marcotcr","id":698010,"node_id":"MDQ6VXNlcjY5ODAxMA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/698010?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/marcotcr","html_url":"https:\/\/github.com\/marcotcr","followers_url":"https:\/\/api.github.com\/users\/marcotcr\/followers","following_url":"https:\/\/api.github.com\/users\/marcotcr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/marcotcr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/marcotcr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/marcotcr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/marcotcr\/orgs","repos_url":"https:\/\/api.github.com\/users\/marcotcr\/repos","events_url":"https:\/\/api.github.com\/users\/marcotcr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/marcotcr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-03T23:46:36Z","updated_at":"2020-09-07T14:53:13Z","closed_at":"2020-09-07T14:53:13Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It a dataset has custom `BUILDER_CONFIGS` with non-keyword arguments (or keyword arguments with non default values), the config is not loaded during the test and causes an error.\r\nI think the problem is that `test_load_real_dataset` calls `load_dataset` with `data_dir=temp_data_dir` ([here](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/tests\/test_dataset_common.py#L200)). This causes [this line](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/src\/nlp\/builder.py#L201) to always be false because `config_kwargs` is not `None`. 
[This line](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/src\/nlp\/builder.py#L222) will be run instead, which doesn't use `BUILDER_CONFIGS`.\r\n\r\nFor an example, you can try running the test for lince:\r\n` RUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_real_dataset_lince`\r\nwhich yields\r\n> E TypeError: __init__() missing 3 required positional arguments: 'colnames', 'classes', and 'label_column'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/474\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/474\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/473","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/473\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/473\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/473\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/473","id":672007247,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYyMTIwNzU4","number":473,"title":"add DoQA dataset (ACL 2020)","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-03T11:26:52Z","updated_at":"2020-09-10T17:19:11Z","closed_at":"2020-09-03T11:44:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/473","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/473","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/473.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/473.patch","merged_at":"2020-09-03T11:44:14Z"},"body":"add DoQA dataset (ACL 2020) http:\/\/ixa.eus\/node\/12931","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/473\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/473\/timeline","performed_via_github_app":null} 
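The seedless-shuffle fix described in PR #475 above is easiest to picture with a short sketch. This is an illustrative example only, written against the `nlp` API of that period; the choice of the `imdb` dataset and the use of `select` to truncate are assumptions, not part of the PR.

```python
from nlp import load_dataset  # the library was still named `nlp` at the time

# Sketch of the behaviour PR #475 fixes: before the fix, seedless shuffles were
# cached, so a second call could silently return the same permutation.
dset = load_dataset("imdb", split="train").select(list(range(100)))

first = dset.shuffle()   # no seed or generator passed
second = dset.shuffle()  # after the fix, this is a genuinely new permutation

print(first["label"][:10])
print(second["label"][:10])  # expected to differ between the two calls
```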
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/472","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/472\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/472\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/472\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/472","id":672000745,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYyMTE1MjA4","number":472,"title":"add crd3 dataset","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-03T11:15:02Z","updated_at":"2020-08-03T11:22:10Z","closed_at":"2020-08-03T11:22:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/472","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/472","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/472.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/472.patch","merged_at":"2020-08-03T11:22:09Z"},"body":"opening new PR for CRD3 dataset (ACL2020) to fix the circle CI problems","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/472\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/472\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/471","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/471\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/471\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/471\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/471","id":671996423,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYyMTExNTU1","number":471,"title":"add reuters21578 
dataset","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-08-03T11:07:14Z","updated_at":"2020-09-03T10:08:03Z","closed_at":"2020-09-03T09:58:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/471","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/471","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/471.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/471.patch","merged_at":"2020-09-03T09:58:50Z"},"body":"new PR to add the reuters21578 dataset and fix the circle CI problems.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/471\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/471\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/470","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/470\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/470\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/470\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/470","id":671952276,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYyMDc0MzQ0","number":470,"title":"Adding IWSLT 2017 
dataset.","user":{"login":"Narsil","id":204321,"node_id":"MDQ6VXNlcjIwNDMyMQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/204321?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Narsil","html_url":"https:\/\/github.com\/Narsil","followers_url":"https:\/\/api.github.com\/users\/Narsil\/followers","following_url":"https:\/\/api.github.com\/users\/Narsil\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Narsil\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Narsil\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Narsil\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Narsil\/orgs","repos_url":"https:\/\/api.github.com\/users\/Narsil\/repos","events_url":"https:\/\/api.github.com\/users\/Narsil\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Narsil\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-08-03T09:52:39Z","updated_at":"2020-09-07T12:33:30Z","closed_at":"2020-09-07T12:33:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/470","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/470","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/470.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/470.patch","merged_at":"2020-09-07T12:33:30Z"},"body":"Created a [IWSLT 2017](https:\/\/sites.google.com\/site\/iwsltevaluation2017\/TED-tasks) dataset script for the *multilingual data*.\r\n\r\n```\r\nBilingual data: {Arabic, German, French, Japanese, Korean, Chinese} <-> English\r\nMultilingual data: German, English, Italian, Dutch, Romanian. (Any pair)\r\n```\r\n\r\nI'm unsure how to handle bilingual vs multilingual. Given `nlp` architecture a Config option seems to be the way to go, however, it might be a bit confusing to have different language pairs with different option. Using just language pairs is not viable as English to German exists in both.\r\n\r\nAny opinion on how that should be done ?\r\nEDIT: I decided to just omit de-en from multilingual as it's only a subset of the bilingual one. 
That way only language pairs exist.\r\nEDIT : Could be interesting for #438 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/470\/reactions","total_count":2,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":2,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/470\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/469","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/469\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/469\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/469\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/469","id":671876963,"node_id":"MDU6SXNzdWU2NzE4NzY5NjM=","number":469,"title":"invalid data type 'str' at _convert_outputs in arrow_dataset.py","user":{"login":"Murgates","id":30617486,"node_id":"MDQ6VXNlcjMwNjE3NDg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30617486?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Murgates","html_url":"https:\/\/github.com\/Murgates","followers_url":"https:\/\/api.github.com\/users\/Murgates\/followers","following_url":"https:\/\/api.github.com\/users\/Murgates\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Murgates\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Murgates\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Murgates\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Murgates\/orgs","repos_url":"https:\/\/api.github.com\/users\/Murgates\/repos","events_url":"https:\/\/api.github.com\/users\/Murgates\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Murgates\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-08-03T07:48:29Z","updated_at":"2020-10-22T09:04:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I trying to build multi label text classifier model using Transformers lib. \r\n\r\nI'm using Transformers NLP to load the data set, while calling trainer.train() method. It throws the following error \r\n\r\nFile \"C:\\***\\arrow_dataset.py\", line 343, in _convert_outputs\r\n v = command(v)\r\nTypeError: new(): invalid data type 'str'\r\n\r\nI'm using pyarrow 1.0.0. And I have simple custom data set with Text and Integer Label. 
\r\nEx: Data\r\n Text , Label #Column Header\r\n I'm facing an Network issue, 1\r\n I forgot my password, 2\r\n\r\nError StackTrace:\r\n\r\nFile \"C:\\**\\transformers\\trainer.py\", line 492, in train\r\n for step, inputs in enumerate(epoch_iterator):\r\n File \"C:\\**\\tqdm\\std.py\", line 1104, in __iter__\r\n for obj in iterable:\r\n File \"C:\\**\\torch\\utils\\data\\dataloader.py\", line 345, in __next__\r\n data = self._next_data()\r\n File \"C:\\**\\torch\\utils\\data\\dataloader.py\", line 385, in _next_data\r\n data = self._dataset_fetcher.fetch(index) # may raise StopIteration\r\n File \"C:\\**\\torch\\utils\\data\\_utils\\fetch.py\", line 44, in fetch\r\n data = [self.dataset[idx] for idx in possibly_batched_index]\r\n File \"C:\\**\\torch\\utils\\data\\_utils\\fetch.py\", line 44, in \r\n data = [self.dataset[idx] for idx in possibly_batched_index]\r\n File \"C:\\**\\nlp\\arrow_dataset.py\", line 414, in __getitem__\r\n output_all_columns=self._output_all_columns,\r\n File \"C:\\**\\nlp\\arrow_dataset.py\", line 403, in _getitem\r\n outputs, format_type=format_type, format_columns=format_columns, output_all_columns=output_all_columns\r\n File \"C:\\**\\nlp\\arrow_dataset.py\", line 343, in _convert_outputs\r\n v = command(v)\r\nTypeError: new(): invalid data type 'str'\r\n \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/469\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/469\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/468","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/468\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/468\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/468\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/468","id":671622441,"node_id":"MDU6SXNzdWU2NzE2MjI0NDE=","number":468,"title":"UnicodeDecodeError while loading PAN-X task of XTREME dataset","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-08-02T14:05:10Z","updated_at":"2020-08-20T08:16:08Z","closed_at":"2020-08-20T08:16:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi 
\ud83e\udd17 team!\r\n\r\n## Description of the problem\r\nI'm running into a `UnicodeDecodeError` while trying to load the PAN-X subset the XTREME dataset: \r\n\r\n```\r\n---------------------------------------------------------------------------\r\nUnicodeDecodeError Traceback (most recent call last)\r\n in \r\n----> 1 dataset = load_dataset(\"xtreme\", \"PAN-X.en\", data_dir='.\/data')\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n 528 ignore_verifications = ignore_verifications or save_infos\r\n 529 # Download\/copy dataset processing script\r\n--> 530 module_path, hash = prepare_module(path, download_config=download_config, dataset=True)\r\n 531 \r\n 532 # Get dataset builder class from the processing script\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/load.py in prepare_module(path, download_config, dataset, force_local_path, **download_kwargs)\r\n 265 \r\n 266 # Download external imports if needed\r\n--> 267 imports = get_imports(local_path)\r\n 268 local_imports = []\r\n 269 library_imports = []\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/load.py in get_imports(file_path)\r\n 156 lines = []\r\n 157 with open(file_path, mode=\"r\") as f:\r\n--> 158 lines.extend(f.readlines())\r\n 159 \r\n 160 logger.info(\"Checking %s for additional imports.\", file_path)\r\n\r\n\/usr\/lib\/python3.6\/encodings\/ascii.py in decode(self, input, final)\r\n 24 class IncrementalDecoder(codecs.IncrementalDecoder):\r\n 25 def decode(self, input, final=False):\r\n---> 26 return codecs.ascii_decode(input, self.errors)[0]\r\n 27 \r\n 28 class StreamWriter(Codec,codecs.StreamWriter):\r\n\r\nUnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 111: ordinal not in range(128)\r\n```\r\n\r\n## Steps to reproduce\r\nInstall from nlp's master branch\r\n```python\r\npip install git+https:\/\/github.com\/huggingface\/nlp.git\r\n```\r\nthen run\r\n```python\r\nfrom nlp import load_dataset\r\n# AmazonPhotos.zip is located in data\/\r\ndataset = load_dataset(\"xtreme\", \"PAN-X.en\", data_dir='.\/data')\r\n```\r\n\r\n## OS \/ platform details\r\n\r\n- `nlp` version: latest from master\r\n- Platform: Linux-4.15.0-72-generic-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.6.9\r\n- PyTorch version (GPU?): 1.4.0 (True)\r\n- Tensorflow version (GPU?): 2.1.0 (True)\r\n- Using GPU in script?: True\r\n- Using distributed or parallel set-up in script?: False\r\n\r\n## Proposed solution\r\nEither change [line 762](https:\/\/github.com\/huggingface\/nlp\/blob\/7ada00b1d62f94eee22a7df38c6b01e3f27194b7\/datasets\/xtreme\/xtreme.py#L762) in `xtreme.py` to include UTF-8 encoding:\r\n\r\n```\r\n# old\r\nwith open(filepath) as f\r\n# new\r\nwith open(filepath, encoding='utf-8') as f\r\n```\r\n\r\nor raise a warning that suggests setting the locale explicitly, e.g.\r\n```python\r\nimport locale\r\nlocale.setlocale(locale.LC_ALL, 'C.UTF-8')\r\n```\r\nI have a preference for the first solution. 
Let me know if you agree and I'll be happy to implement the simple fix!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/468\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/468\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/467","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/467\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/467\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/467\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/467","id":671580010,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYxNzgwMzUy","number":467,"title":"DOCS: Fix typo","user":{"login":"Bharat123rox","id":13381361,"node_id":"MDQ6VXNlcjEzMzgxMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13381361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Bharat123rox","html_url":"https:\/\/github.com\/Bharat123rox","followers_url":"https:\/\/api.github.com\/users\/Bharat123rox\/followers","following_url":"https:\/\/api.github.com\/users\/Bharat123rox\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Bharat123rox\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Bharat123rox\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Bharat123rox\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Bharat123rox\/orgs","repos_url":"https:\/\/api.github.com\/users\/Bharat123rox\/repos","events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-08-02T08:59:37Z","updated_at":"2020-08-02T13:52:27Z","closed_at":"2020-08-02T09:18:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/467","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/467","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/467.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/467.patch","merged_at":"2020-08-02T09:18:54Z"},"body":"Fix typo from dictionnary -> dictionary","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/467\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/467\/timeline","performed_via_github_app":null} 
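The `UnicodeDecodeError` reported in issue #468 above comes down to reading a UTF-8 file under an ASCII default encoding. The self-contained snippet below reproduces that failure mode and the proposed fix of passing `encoding='utf-8'` explicitly; the file name is made up for the example.

```python
from pathlib import Path

# Reproduce the failure mode from issue #468 with a throwaway file:
# the script contains non-ASCII bytes, and an ASCII decoder chokes on them.
path = Path("example_script.py")
path.write_text("# naïve comment with non-ASCII characters\n", encoding="utf-8")

try:
    path.read_text(encoding="ascii")  # what an ASCII locale effectively does
except UnicodeDecodeError as err:
    print("ascii decode failed:", err)

print(path.read_text(encoding="utf-8"))  # the proposed fix: request UTF-8 explicitly
```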
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/466","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/466\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/466\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/466\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/466","id":670766891,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYxMDEzOTM0","number":466,"title":"[METRICS] Various improvements on metrics","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-08-01T11:03:45Z","updated_at":"2020-08-17T15:15:00Z","closed_at":"2020-08-17T15:14:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/466","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/466","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/466.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/466.patch","merged_at":"2020-08-17T15:14:59Z"},"body":"- Disallow the use of positional arguments to avoid `predictions` vs `references` mistakes\r\n- Allow to directly feed numpy\/pytorch\/tensorflow\/pandas objects in metrics","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/466\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/466\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/465","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/465\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/465\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/465\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/465","id":669889779,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYwMjEwODYw","number":465,"title":"Keep features after 
transform","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-07-31T14:43:21Z","updated_at":"2020-07-31T18:27:33Z","closed_at":"2020-07-31T18:27:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/465","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/465","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/465.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/465.patch","merged_at":"2020-07-31T18:27:32Z"},"body":"When applying a transform like `map`, some features were lost (and inferred features were used).\r\nIt was the case for ClassLabel, Translation, etc.\r\n\r\nTo fix that, I did some modifications in the `ArrowWriter`:\r\n\r\n- added the `update_features` parameter. When it's `True`, then the features specified by the user (if any) can be updated with inferred features if their type don't match. `map` transform sets `update_features=True` when writing to cache file or buffer. Features won't change by default in `map`.\r\n\r\n- added the `with_metadata` parameter. 
If `True`, the `features` (after update) will be written inside the metadata of the schema in this format:\r\n```\r\n{\r\n \"huggingface\": {\"features\" : }\r\n} \r\n```\r\nThen, once a dataset is instantiated without info\/features, these metadata are used to set the features of the dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/465\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/465\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/464","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/464\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/464\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/464\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/464","id":669767381,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYwMTAxNDYz","number":464,"title":"Add rename, remove and cast in-place operations","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-31T12:30:21Z","updated_at":"2020-07-31T15:50:02Z","closed_at":"2020-07-31T15:50:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/464","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/464","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/464.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/464.patch","merged_at":"2020-07-31T15:50:00Z"},"body":"Add a bunch of in-place operation leveraging the Arrow back-end to rename and remove columns and cast to new features without using the more expensive `map` method.\r\n\r\nThese methods are added to `Dataset` as well as `DatasetDict`.\r\n\r\nAdded tests for these new methods and add the methods to the doc.\r\n\r\nNaming follows the new pattern with a trailing underscore indicating in-place 
methods.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/464\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/464\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/463","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/463\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/463\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/463\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/463","id":669735455,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYwMDcyNjQ1","number":463,"title":"Add dataset\/mlsum","user":{"login":"RachelKer","id":36986299,"node_id":"MDQ6VXNlcjM2OTg2Mjk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36986299?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RachelKer","html_url":"https:\/\/github.com\/RachelKer","followers_url":"https:\/\/api.github.com\/users\/RachelKer\/followers","following_url":"https:\/\/api.github.com\/users\/RachelKer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RachelKer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RachelKer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RachelKer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RachelKer\/orgs","repos_url":"https:\/\/api.github.com\/users\/RachelKer\/repos","events_url":"https:\/\/api.github.com\/users\/RachelKer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RachelKer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-07-31T11:50:52Z","updated_at":"2020-08-24T14:54:42Z","closed_at":"2020-08-24T14:54:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/463","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/463","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/463.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/463.patch","merged_at":null},"body":"New pull request that should correct the previous errors. 
\r\n\r\nThe load_real_data stills fails because it is looking for a default dataset URL that does not exists, this does not happen when loading the dataset with load_dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/463\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/463\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/462","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/462\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/462\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/462\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/462","id":669715547,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYwMDU0NDgz","number":462,"title":"add DoQA (ACL 2020) dataset","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-31T11:25:56Z","updated_at":"2020-08-03T11:28:27Z","closed_at":"2020-08-03T11:28:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/462","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/462","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/462.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/462.patch","merged_at":null},"body":"adds DoQA (ACL 2020) dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/462\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/462\/timeline","performed_via_github_app":null} 
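PR #465 above stores the user-defined features inside the Arrow schema metadata so they can be recovered when a dataset is instantiated without explicit info. The snippet below sketches only the underlying pyarrow mechanism; the exact payload the library serializes under the "huggingface" key is an assumption here.

```python
import json
import pyarrow as pa

# Generic pyarrow mechanism behind PR #465: attach key/value metadata to the
# schema when writing, then read it back when the table is loaded again.
table = pa.table({"text": ["a", "b"], "label": [0, 1]})
features_payload = json.dumps({"features": {"label": {"_type": "ClassLabel"}}})  # assumed shape

table = table.replace_schema_metadata({b"huggingface": features_payload.encode("utf-8")})

# Later, when no features were passed explicitly, the metadata is consulted:
recovered = json.loads(table.schema.metadata[b"huggingface"].decode("utf-8"))
print(recovered["features"])
```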
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/461","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/461\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/461\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/461\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/461","id":669703508,"node_id":"MDExOlB1bGxSZXF1ZXN0NDYwMDQzNDY5","number":461,"title":"Doqa","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-31T11:11:12Z","updated_at":"2020-07-31T11:13:15Z","closed_at":"2020-07-31T11:13:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/461","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/461","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/461.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/461.patch","merged_at":null},"body":"add DoQA (ACL 2020) dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/461\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/461\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/460","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/460\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/460\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/460\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/460","id":669585256,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU5OTM2OTU2","number":460,"title":"Fix KeyboardInterrupt in map and bad indices in 
select","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-31T08:57:15Z","updated_at":"2020-07-31T11:32:19Z","closed_at":"2020-07-31T11:32:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/460","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/460","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/460.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/460.patch","merged_at":"2020-07-31T11:32:18Z"},"body":"If you interrupted a map function while it was writing, the cached file was not discarded.\r\nTherefore the next time you called map, it was loading an incomplete arrow file.\r\n\r\nWe had the same issue with select if there was a bad indice at one point.\r\n\r\nTo fix that I used temporary files that are renamed once everything is finished.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/460\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/460\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/459","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/459\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/459\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/459\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/459","id":669545437,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU5OTAxMjEy","number":459,"title":"[Breaking] Update Dataset and DatasetDict 
API","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-31T08:11:33Z","updated_at":"2020-08-26T08:28:36Z","closed_at":"2020-08-26T08:28:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/459","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/459","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/459.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/459.patch","merged_at":"2020-08-26T08:28:35Z"},"body":"This PR contains a few breaking changes so it's probably good to keep it for the next (major) release:\r\n- rename the `flatten`, `drop` and `dictionary_encode_column` methods in `flatten_`, `drop_` and `dictionary_encode_column_` to indicate that these methods have in-place effects as discussed in #166. From now on we should keep the convention of having a trailing underscore for methods which have an in-place effet. I also adopt the conversion of not returning the (self) dataset for these methods. This is different than what PyTorch does for instance (`model.to()` is in-place but return the self model) but I feel like it's a safer approach in terms of UX.\r\n- remove the `dataset.columns` property which returns a low-level Apache Arrow object and should not be used by users. Similarly, remove `dataset. 
nbytes` which we don't really want to expose in this bare-bone format.\r\n- add a few more properties and methods to `DatasetDict`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/459\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/459\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/458","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/458\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/458\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/458\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/458","id":668972666,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU5Mzk5ODg2","number":458,"title":"Install CoVal metric from github","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-30T16:59:25Z","updated_at":"2020-07-31T13:56:33Z","closed_at":"2020-07-31T13:56:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/458","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/458","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/458.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/458.patch","merged_at":"2020-07-31T13:56:33Z"},"body":"Changed the import statements in `coval.py` to direct the user to install the original package from github if it's not already installed (the warning will only display properly after merging [PR455](https:\/\/github.com\/huggingface\/nlp\/pull\/455))\r\n\r\nAlso changed the function call to use named rather than positional arguments.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/458\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/458\/timeline","performed_via_github_app":null} 
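Both PR #466 and PR #458 above move metric calls to named arguments so that `predictions` and `references` cannot be silently swapped. The function below is a generic keyword-only sketch of that pattern, not the library's actual metric API.

```python
# Keyword-only parameters force callers to name predictions and references,
# which is the mistake-proofing rationale given in PR #466.
def compute_accuracy(*, predictions, references):
    return sum(p == r for p, r in zip(predictions, references)) / len(references)

print(compute_accuracy(predictions=[1, 0, 1], references=[1, 1, 1]))  # 0.666...
# compute_accuracy([1, 0, 1], [1, 1, 1])  # TypeError: takes 0 positional arguments
```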
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/457","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/457\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/457\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/457\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/457","id":668898386,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU5MzMyOTM1","number":457,"title":"add set_format to DatasetDict + tests","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-30T15:53:20Z","updated_at":"2020-07-30T17:34:36Z","closed_at":"2020-07-30T17:34:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/457","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/457","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/457.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/457.patch","merged_at":"2020-07-30T17:34:34Z"},"body":"Add the `set_format` and `formated_as` and `reset_format` to `DatasetDict`.\r\nAdd tests to these for `Dataset` and `DatasetDict`.\r\nFix some bugs uncovered by the tests for `pandas` formating.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/457\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/457\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/456","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/456\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/456\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/456\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/456","id":668723785,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU5MTc1MTY0","number":456,"title":"add crd3(ACL 2020) 
dataset","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-30T13:28:35Z","updated_at":"2020-08-03T11:28:52Z","closed_at":"2020-08-03T11:28:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/456","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/456","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/456.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/456.patch","merged_at":null},"body":"This PR adds the **Critical Role Dungeons and Dragons Dataset** published at ACL 2020","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/456\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/456\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/455","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/455\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/455\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/455\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/455","id":668037965,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU4NTk4NTUw","number":455,"title":"Add 
bleurt","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-07-29T18:08:32Z","updated_at":"2020-07-31T13:56:14Z","closed_at":"2020-07-31T13:56:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/455","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/455","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/455.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/455.patch","merged_at":"2020-07-31T13:56:14Z"},"body":"This PR adds the BLEURT metric to the library.\r\n\r\nThe BLEURT `Metric` downloads a TF checkpoint corresponding to its `config_name` at creation (in the `_info` function). Default is set to `bleurt-base-128`.\r\n\r\nNote that the default in the original package is `bleurt-tiny-128`, but they throw a warning and recommend using `bleurt-base-128` instead. 
I think it's safer to have our users have a functioning metric when they call the default behavior, we'll address discrepancies in the issues\/discussions if it comes up.\r\n\r\nIn addition to the BLEURT file, `load.py` was changed so we can ask users to pip install the required packages from git when they have a `setup.py` but are not on PyPL\r\n\r\ncc @ankparikh @tsellam","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/455\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/455\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/454","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/454\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/454\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/454\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/454","id":668011577,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU4NTc3MzA3","number":454,"title":"Create SECURITY.md","user":{"login":"ChenZehong13","id":56394989,"node_id":"MDQ6VXNlcjU2Mzk0OTg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/56394989?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ChenZehong13","html_url":"https:\/\/github.com\/ChenZehong13","followers_url":"https:\/\/api.github.com\/users\/ChenZehong13\/followers","following_url":"https:\/\/api.github.com\/users\/ChenZehong13\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ChenZehong13\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ChenZehong13\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ChenZehong13\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ChenZehong13\/orgs","repos_url":"https:\/\/api.github.com\/users\/ChenZehong13\/repos","events_url":"https:\/\/api.github.com\/users\/ChenZehong13\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ChenZehong13\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-29T17:23:34Z","updated_at":"2020-07-29T21:45:52Z","closed_at":"2020-07-29T21:45:52Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/454","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/454","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/454.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/454.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/454\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/454\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/453","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/453\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/453\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/453\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/453","id":667728247,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU4MzQwNzky","number":453,"title":"add builder tests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-29T10:22:07Z","updated_at":"2020-07-29T11:14:06Z","closed_at":"2020-07-29T11:14:05Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/453","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/453","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/453.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/453.patch","merged_at":"2020-07-29T11:14:05Z"},"body":"I added `as_dataset` and `download_and_prepare` to the tests","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/453\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/453\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/452","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/452\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/452\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/452\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/452","id":667498295,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU4MTUzNjQy","number":452,"title":"Guardian authorship 
dataset","user":{"login":"malikaltakrori","id":25109412,"node_id":"MDQ6VXNlcjI1MTA5NDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25109412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/malikaltakrori","html_url":"https:\/\/github.com\/malikaltakrori","followers_url":"https:\/\/api.github.com\/users\/malikaltakrori\/followers","following_url":"https:\/\/api.github.com\/users\/malikaltakrori\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/malikaltakrori\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/malikaltakrori\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/malikaltakrori\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/malikaltakrori\/orgs","repos_url":"https:\/\/api.github.com\/users\/malikaltakrori\/repos","events_url":"https:\/\/api.github.com\/users\/malikaltakrori\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/malikaltakrori\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-07-29T02:23:57Z","updated_at":"2020-08-20T15:09:57Z","closed_at":"2020-08-20T15:07:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/452","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/452","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/452.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/452.patch","merged_at":"2020-08-20T15:07:55Z"},"body":"A new dataset: Guardian news articles for authorship attribution\r\n\r\n**tests passed:**\r\npython nlp-cli dummy_data datasets\/guardian_authorship --save_infos --all_configs\r\n\r\nRUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_dataset_all_configs_guardian_authorship\r\n\r\n**Tests failed:**\r\nReal data: RUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_real_dataset_guardian_authorship\r\noutput: __init__() missing 3 required positional arguments: 'train_folder', 'valid_folder', and 'tes...' \r\n\r\nRemarks: This is the init function of my class. I am not sure why it passes in both my tests and with nlp-cli, but fails here. By the way, I ran this command with another 2 datasets and they failed:\r\n* _glue - OSError: Cannot find data file.\r\n*_newsgroup - FileNotFoundError: Local file datasets\/newsgroup\/dummy\/18828_comp.graphics\/3.0.0\/dummy_data.zip doesn't exist\r\n\r\nThank you for letting us contribute to such a huge and important library! \r\n\r\nEDIT:\r\nI was able to fix the dummy_data issue. This dataset has around 14 configurations. I was testing with only 2, but their versions were not in a sequence, they were V1.0.0 and V.12.0.0. It seems that the testing code generates testes for all the versions from 0 to MAX, and was testing for versions (and dummy_data.zip files) that do not exist. 
I fixed that by changing the versions to 1 and 2.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/452\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/452\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/451","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/451\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/451\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/451\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/451","id":667210468,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU3OTIxNDMx","number":451,"title":"Fix csv\/json\/txt cache dir","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-07-28T16:30:51Z","updated_at":"2020-07-29T13:57:23Z","closed_at":"2020-07-29T13:57:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/451","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/451","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/451.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/451.patch","merged_at":"2020-07-29T13:57:22Z"},"body":"The cache dir for csv\/json\/txt datasets was always the same. 
This is an issue because it should be different depending on the data files provided by the user.\r\n\r\nTo fix that, I added a line that use the hash of the data files provided by the user to define the cache dir.\r\n\r\nThis should fix #444 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/451\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/451\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/450","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/450\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/450\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/450\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/450","id":667074120,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU3ODA5ODA2","number":450,"title":"add sogou_news","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-28T13:29:10Z","updated_at":"2020-07-29T13:30:18Z","closed_at":"2020-07-29T13:30:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/450","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/450","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/450.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/450.patch","merged_at":"2020-07-29T13:30:17Z"},"body":"This PR adds the sogou news dataset\r\n#353 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/450\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/450\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/449","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/449\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/449\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/449\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/449","id":666898923,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU3NjY0NjYx","number":449,"title":"add reuters21578 dataset","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-07-28T08:58:12Z","updated_at":"2020-08-03T11:10:31Z","closed_at":"2020-08-03T11:10:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/449","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/449","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/449.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/449.patch","merged_at":null},"body":"This PR adds the `Reuters_21578` dataset https:\/\/kdd.ics.uci.edu\/databases\/reuters21578\/reuters21578.html \r\n#353 \r\n\r\nThe datasets is a lit of `.sgm` files which are a bit different from xml file indeed `xml.etree` couldn't be used to read files. 
I consider them as text file (to avoid using external library) and read line by line (maybe there is a better way to do, happy to get your opinion on it)\r\n\r\nIn the Readme file 3 ways to split the dataset are given.:\r\n\r\n- The Modified Lewis (\"ModLewis\") Split: train, test and unused-set\r\n\r\n- The Modified Apte (\"ModApte\") Split : train, test and unused-set\r\n\r\n- The Modified Hayes (\"ModHayes\") Split: train and test\r\n\r\nHere I consider the last one as the readme file highlight that this split provides the ability to compare results with those of the 2 first splits.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/449\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/449\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/448","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/448\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/448\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/448\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/448","id":666893443,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU3NjYwMDU2","number":448,"title":"add aws load metric test","user":{"login":"idoh","id":5303103,"node_id":"MDQ6VXNlcjUzMDMxMDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5303103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/idoh","html_url":"https:\/\/github.com\/idoh","followers_url":"https:\/\/api.github.com\/users\/idoh\/followers","following_url":"https:\/\/api.github.com\/users\/idoh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/idoh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/idoh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/idoh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/idoh\/orgs","repos_url":"https:\/\/api.github.com\/users\/idoh\/repos","events_url":"https:\/\/api.github.com\/users\/idoh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/idoh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-07-28T08:50:22Z","updated_at":"2020-07-28T15:02:27Z","closed_at":"2020-07-28T15:02:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/448","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/448","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/448.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/448.patch","merged_at":"2020-07-28T15:02:26Z"},"body":"Following issue #445\r\nAdded a test to recognize import errors of all metrics","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/448\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/448\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/447","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/447\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/447\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/447\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/447","id":666842115,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU3NjE2NDA0","number":447,"title":"[BugFix] fix wrong import of DEFAULT_TOKENIZER","user":{"login":"idoh","id":5303103,"node_id":"MDQ6VXNlcjUzMDMxMDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5303103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/idoh","html_url":"https:\/\/github.com\/idoh","followers_url":"https:\/\/api.github.com\/users\/idoh\/followers","following_url":"https:\/\/api.github.com\/users\/idoh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/idoh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/idoh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/idoh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/idoh\/orgs","repos_url":"https:\/\/api.github.com\/users\/idoh\/repos","events_url":"https:\/\/api.github.com\/users\/idoh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/idoh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-28T07:41:10Z","updated_at":"2020-07-28T12:58:01Z","closed_at":"2020-07-28T12:52:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/447","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/447","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/447.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/447.patch","merged_at":"2020-07-28T12:52:05Z"},"body":"Fixed the path to `DEFAULT_TOKENIZER`\r\n#445","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/447\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/447\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/446","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/446\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/446\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/446\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/446","id":666837351,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU3NjEyNTg5","number":446,"title":"[BugFix] fix wrong import of 
DEFAULT_TOKENIZER","user":{"login":"idoh","id":5303103,"node_id":"MDQ6VXNlcjUzMDMxMDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5303103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/idoh","html_url":"https:\/\/github.com\/idoh","followers_url":"https:\/\/api.github.com\/users\/idoh\/followers","following_url":"https:\/\/api.github.com\/users\/idoh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/idoh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/idoh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/idoh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/idoh\/orgs","repos_url":"https:\/\/api.github.com\/users\/idoh\/repos","events_url":"https:\/\/api.github.com\/users\/idoh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/idoh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-28T07:32:47Z","updated_at":"2020-07-28T07:34:46Z","closed_at":"2020-07-28T07:33:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/446","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/446","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/446.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/446.patch","merged_at":null},"body":"Fixed the path to `DEFAULT_TOKENIZER`\r\n#445 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/446\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/446\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/445","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/445\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/445\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/445\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/445","id":666836658,"node_id":"MDU6SXNzdWU2NjY4MzY2NTg=","number":445,"title":"DEFAULT_TOKENIZER import error in 
sacrebleu","user":{"login":"idoh","id":5303103,"node_id":"MDQ6VXNlcjUzMDMxMDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5303103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/idoh","html_url":"https:\/\/github.com\/idoh","followers_url":"https:\/\/api.github.com\/users\/idoh\/followers","following_url":"https:\/\/api.github.com\/users\/idoh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/idoh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/idoh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/idoh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/idoh\/orgs","repos_url":"https:\/\/api.github.com\/users\/idoh\/repos","events_url":"https:\/\/api.github.com\/users\/idoh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/idoh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-28T07:31:30Z","updated_at":"2020-07-28T12:58:56Z","closed_at":"2020-07-28T12:58:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Latest Version 0.3.0\r\n\r\nWhen loading the metric \"sacrebleu\" there is an import error due to the wrong path\r\n![image](https:\/\/user-images.githubusercontent.com\/5303103\/88633063-2c5e5f00-d0bd-11ea-8ca8-4704dc975433.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/445\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/445\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/444","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/444\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/444\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/444\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/444","id":666280842,"node_id":"MDU6SXNzdWU2NjYyODA4NDI=","number":444,"title":"Keep loading old file even I specify a new file in 
load_dataset","user":{"login":"joshhu","id":10594453,"node_id":"MDQ6VXNlcjEwNTk0NDUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10594453?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joshhu","html_url":"https:\/\/github.com\/joshhu","followers_url":"https:\/\/api.github.com\/users\/joshhu\/followers","following_url":"https:\/\/api.github.com\/users\/joshhu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joshhu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joshhu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joshhu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joshhu\/orgs","repos_url":"https:\/\/api.github.com\/users\/joshhu\/repos","events_url":"https:\/\/api.github.com\/users\/joshhu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joshhu\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2020-07-27T13:08:06Z","updated_at":"2020-07-29T13:57:22Z","closed_at":"2020-07-29T13:57:22Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I used load a file called 'a.csv' by \r\n```\r\ndataset = load_dataset('csv', data_file='.\/a.csv')\r\n```\r\nAnd 
after a while, I tried to load another csv called 'b.csv'\r\n```\r\ndataset = load_dataset('csv', data_file='.\/b.csv')\r\n```\r\nHowever, the new dataset seems to remain the old 'a.csv' and not loading new csv file.\r\n\r\nEven worse, after I load a.csv, the load_dataset function keeps loading the 'a.csv' afterward. \r\n\r\nIs this a cache problem?\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/444\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/444\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/443","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/443\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/443\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/443\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/443","id":666246716,"node_id":"MDU6SXNzdWU2NjYyNDY3MTY=","number":443,"title":"Cannot unpickle saved .pt dataset with torch.save()\/load()","user":{"login":"vegarab","id":24683907,"node_id":"MDQ6VXNlcjI0NjgzOTA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24683907?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vegarab","html_url":"https:\/\/github.com\/vegarab","followers_url":"https:\/\/api.github.com\/users\/vegarab\/followers","following_url":"https:\/\/api.github.com\/users\/vegarab\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vegarab\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vegarab\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vegarab\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vegarab\/orgs","repos_url":"https:\/\/api.github.com\/users\/vegarab\/repos","events_url":"https:\/\/api.github.com\/users\/vegarab\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vegarab\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-27T12:13:37Z","updated_at":"2020-07-27T13:05:11Z","closed_at":"2020-07-27T13:05:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Saving a formatted torch dataset to file using `torch.save()`. 
Loading the same file fails during unpickling:\r\n\r\n```python\r\n>>> import torch\r\n>>> import nlp\r\n\r\n>>> squad = nlp.load_dataset(\"squad.py\", split=\"train\")\r\n>>> squad\r\nDataset(features: {'source_text': Value(dtype='string', id=None), 'target_text': Value(dtype='string', id=None)}, num_rows: 87599)\r\n>>> squad = squad.map(create_features, batched=True)\r\n>>> squad.set_format(type=\"torch\", columns=[\"source_ids\", \"target_ids\", \"attention_mask\"])\r\n>>> torch.save(squad, \"squad.pt\")\r\n\r\n>>> squad_pt = torch.load(\"squad.pt\")\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/vegarab\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/torch\/serialization.py\", line 593, in load\r\n return _legacy_load(opened_file, map_location, pickle_module, **pickle_load_args)\r\n File \"\/home\/vegarab\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/torch\/serialization.py\", line 773, in _legacy_load\r\n result = unpickler.load()\r\n File \"\/home\/vegarab\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/nlp\/splits.py\", line 493, in __setitem__\r\n raise ValueError(\"Cannot add elem. Use .add() instead.\")\r\nValueError: Cannot add elem. Use .add() instead.\r\n```\r\nwhere `create_features` is a function that tokenizes the data using `batch_encode_plus` and returns a Dict with `input_ids`, `target_ids` and `attention_mask`. \r\n```python\r\ndef create_features(batch):\r\n source_text_encoding = tokenizer.batch_encode_plus(\r\n batch[\"source_text\"],\r\n max_length=max_source_length,\r\n pad_to_max_length=True,\r\n truncation=True)\r\n\r\n target_text_encoding = tokenizer.batch_encode_plus(\r\n batch[\"target_text\"],\r\n max_length=max_target_length,\r\n pad_to_max_length=True,\r\n truncation=True)\r\n\r\n features = {\r\n \"source_ids\": source_text_encoding[\"input_ids\"],\r\n \"target_ids\": target_text_encoding[\"input_ids\"],\r\n \"attention_mask\": source_text_encoding[\"attention_mask\"]\r\n }\r\n\r\n return features\r\n```\r\n\r\nI found a similar issue in [issue 5267 in the huggingface\/transformers repo](https:\/\/github.com\/huggingface\/transformers\/issues\/5267) which was solved by downgrading to `nlp==0.2.0`. That did not solve this problem, however. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/443\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/443\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/442","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/442\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/442\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/442\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/442","id":666201810,"node_id":"MDU6SXNzdWU2NjYyMDE4MTA=","number":442,"title":"[Suggestion] Glue Diagnostic Data with Labels ","user":{"login":"ggbetz","id":3662782,"node_id":"MDQ6VXNlcjM2NjI3ODI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3662782?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ggbetz","html_url":"https:\/\/github.com\/ggbetz","followers_url":"https:\/\/api.github.com\/users\/ggbetz\/followers","following_url":"https:\/\/api.github.com\/users\/ggbetz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ggbetz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ggbetz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ggbetz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ggbetz\/orgs","repos_url":"https:\/\/api.github.com\/users\/ggbetz\/repos","events_url":"https:\/\/api.github.com\/users\/ggbetz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ggbetz\/received_events","type":"User","site_admin":false},"labels":[{"id":2067401494,"node_id":"MDU6TGFiZWwyMDY3NDAxNDk0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Dataset%20discussion","name":"Dataset discussion","color":"72f99f","default":false,"description":"Discussions on the datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-27T10:59:58Z","updated_at":"2020-08-24T15:13:20Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello! 
First of all, thanks for setting up this useful project!\r\n\r\nI've just realised you provide the the [Glue Diagnostics Data](https:\/\/huggingface.co\/nlp\/viewer\/?dataset=glue&config=ax) without labels, indicating in the `GlueConfig` that you've only a test set.\r\n\r\nYet, the data with labels is available, too (see also [here](https:\/\/gluebenchmark.com\/diagnostics#introduction)):\r\n\r\nhttps:\/\/www.dropbox.com\/s\/ju7d95ifb072q9f\/diagnostic-full.tsv?dl=1 \r\n\r\nHave you considered incorporating it?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/442\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/442\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/441","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/441\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/441\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/441\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/441","id":666148413,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU3MDQyMjY3","number":441,"title":"Add features parameter in load dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-27T09:50:01Z","updated_at":"2020-07-30T12:51:17Z","closed_at":"2020-07-30T12:51:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/441","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/441","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/441.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/441.patch","merged_at":"2020-07-30T12:51:16Z"},"body":"Added `features` argument in `nlp.load_dataset`.\r\nIf they don't match the data type, it raises a `ValueError`.\r\n\r\nIt's a draft PR because #440 needs to be merged 
first.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/441\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/441\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/440","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/440\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/440\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/440\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/440","id":666116823,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU3MDE2MjQy","number":440,"title":"Fix user specified features in map","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-27T09:04:26Z","updated_at":"2020-07-28T09:25:23Z","closed_at":"2020-07-28T09:25:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/440","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/440","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/440.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/440.patch","merged_at":"2020-07-28T09:25:22Z"},"body":"`.map` didn't keep the user specified features because of an issue in the writer.\r\nThe writer used to overwrite the user specified features with inferred features.\r\n\r\nI also added tests to make sure it doesn't happen again.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/440\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/440\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/439","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/439\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/439\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/439\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/439","id":665964673,"node_id":"MDU6SXNzdWU2NjU5NjQ2NzM=","number":439,"title":"Issues: Adding a FAISS or Elastic Search index to a Dataset","user":{"login":"nsankar","id":431890,"node_id":"MDQ6VXNlcjQzMTg5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/431890?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nsankar","html_url":"https:\/\/github.com\/nsankar","followers_url":"https:\/\/api.github.com\/users\/nsankar\/followers","following_url":"https:\/\/api.github.com\/users\/nsankar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nsankar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nsankar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nsankar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nsankar\/orgs","repos_url":"https:\/\/api.github.com\/users\/nsankar\/repos","events_url":"https:\/\/api.github.com\/users\/nsankar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nsankar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-07-27T04:25:17Z","updated_at":"2020-10-28T01:46:24Z","closed_at":"2020-10-28T01:46:24Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It seems the DPRContextEncoder, DPRContextEncoderTokenizer cited[ in this documentation](https:\/\/huggingface.co\/nlp\/faiss_and_ea.html) is not implemented ? It didnot work with the standard nlp installation . Also, I couldn't find or use it with the latest nlp install from github in Colab. Is there any dependency on the latest PyArrow 1.0.0 ? 
Is it yet to be made generally available ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/439\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/439\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/438","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/438\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/438\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/438\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/438","id":665865490,"node_id":"MDU6SXNzdWU2NjU4NjU0OTA=","number":438,"title":"New Datasets: IWSLT15+, ITTB","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-26T21:43:04Z","updated_at":"2020-08-24T15:12:15Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"**Links:**\r\n[iwslt](https:\/\/pytorchnlp.readthedocs.io\/en\/latest\/_modules\/torchnlp\/datasets\/iwslt.html)\r\nDon't know if that link is up to date.\r\n\r\n[ittb](http:\/\/www.cfilt.iitb.ac.in\/iitb_parallel\/)\r\n**Motivation**: replicate mbart finetuning results (table below)\r\n![image](https:\/\/user-images.githubusercontent.com\/6045025\/88490093-0c1c8c00-cf67-11ea-960d-8dcaad2aa8eb.png)\r\n\r\n\r\nFor future readers, we already have the following language pairs in the wmt namespaces:\r\n\r\n```\r\nwmt14: ['cs-en', 'de-en', 'fr-en', 'hi-en', 'ru-en']\r\nwmt15: ['cs-en', 'de-en', 'fi-en', 'fr-en', 'ru-en']\r\nwmt16: ['cs-en', 'de-en', 'fi-en', 'ro-en', 'ru-en', 'tr-en']\r\nwmt17: ['cs-en', 'de-en', 'fi-en', 'lv-en', 'ru-en', 'tr-en', 'zh-en']\r\nwmt18: ['cs-en', 'de-en', 'et-en', 'fi-en', 'kk-en', 'ru-en', 'tr-en', 'zh-en']\r\nwmt19: ['cs-en', 'de-en', 'fi-en', 'gu-en', 'kk-en', 'lt-en', 'ru-en', 'zh-en', 
'fr-de']\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/438\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/438\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/437","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/437\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/437\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/437\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/437","id":665597176,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU2NjIzNjc3","number":437,"title":"Fix XTREME PAN-X loading","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-07-25T14:44:57Z","updated_at":"2020-07-30T08:28:15Z","closed_at":"2020-07-30T08:28:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/437","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/437","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/437.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/437.patch","merged_at":"2020-07-30T08:28:15Z"},"body":"Hi \ud83e\udd17 \r\nIn response to the discussion in #425 @lewtun and I made some fixes to the repo. In the original XTREME implementation the PAN-X dataset for named entity recognition loaded each word\/tag pair as a single row and the sentence relation was lost. With the fix each row contains the list of all words in a single sentence and their NER tags. 
This is also in agreement with the [NER example](https:\/\/github.com\/huggingface\/transformers\/tree\/master\/examples\/token-classification) in the transformers repo.\r\n\r\nWith the fix the output of the dataset should look as follows:\r\n```python\r\n>>> dataset = load_dataset(\"xtreme\", \"PAN-X.en\", data_dir='.\/data')\r\n>>> dataset['train'][0]\r\n{'words': ['R.H.', 'Saunders', '(', 'St.', 'Lawrence', 'River', ')', '(', '968', 'MW', ')'],\r\n 'ner_tags': ['B-ORG', 'I-ORG', 'O', 'B-ORG', 'I-ORG', 'I-ORG', 'O', 'O', 'O', 'O', 'O'],\r\n 'langs': ['en', 'en', 'en', 'en', 'en', 'en', 'en', 'en', 'en', 'en', 'en']}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/437\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/437\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/436","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/436\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/436\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/436\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/436","id":665582167,"node_id":"MDU6SXNzdWU2NjU1ODIxNjc=","number":436,"title":"Google Colab - load_dataset - PyArrow exception","user":{"login":"nsankar","id":431890,"node_id":"MDQ6VXNlcjQzMTg5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/431890?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nsankar","html_url":"https:\/\/github.com\/nsankar","followers_url":"https:\/\/api.github.com\/users\/nsankar\/followers","following_url":"https:\/\/api.github.com\/users\/nsankar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nsankar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nsankar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nsankar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nsankar\/orgs","repos_url":"https:\/\/api.github.com\/users\/nsankar\/repos","events_url":"https:\/\/api.github.com\/users\/nsankar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nsankar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2020-07-25T13:05:20Z","updated_at":"2020-08-20T08:08:18Z","closed_at":"2020-08-20T08:08:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"With latest PyArrow 1.0.0 installed, I get the following exception . Restarting colab has the same issue\r\n\r\nImportWarning: To use `nlp`, the module `pyarrow>=0.16.0` is required, and the current version of `pyarrow` doesn't match this condition. If you are running this in a Google Colab, you should probably just restart the runtime to use the right version of `pyarrow`.\r\n\r\nThe error goes only when I install version 0.16.0 \r\ni.e. 
!pip install pyarrow==0.16.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/436\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/436\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/435","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/435\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/435\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/435\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/435","id":665507141,"node_id":"MDU6SXNzdWU2NjU1MDcxNDE=","number":435,"title":"ImportWarning for pyarrow 1.0.0","user":{"login":"HanGuo97","id":18187806,"node_id":"MDQ6VXNlcjE4MTg3ODA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18187806?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/HanGuo97","html_url":"https:\/\/github.com\/HanGuo97","followers_url":"https:\/\/api.github.com\/users\/HanGuo97\/followers","following_url":"https:\/\/api.github.com\/users\/HanGuo97\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/HanGuo97\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/HanGuo97\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/HanGuo97\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/HanGuo97\/orgs","repos_url":"https:\/\/api.github.com\/users\/HanGuo97\/repos","events_url":"https:\/\/api.github.com\/users\/HanGuo97\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/HanGuo97\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-07-25T03:44:39Z","updated_at":"2020-09-08T17:57:15Z","closed_at":"2020-08-03T16:37:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The following PR raised ImportWarning at `pyarrow ==1.0.0` https:\/\/github.com\/huggingface\/nlp\/pull\/265\/files","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/435\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/435\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/434","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/434\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/434\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/434\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/434","id":665477638,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU2NTM3Njgz","number":434,"title":"Fixed check for 
pyarrow","user":{"login":"nadahlberg","id":58701810,"node_id":"MDQ6VXNlcjU4NzAxODEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/58701810?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nadahlberg","html_url":"https:\/\/github.com\/nadahlberg","followers_url":"https:\/\/api.github.com\/users\/nadahlberg\/followers","following_url":"https:\/\/api.github.com\/users\/nadahlberg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nadahlberg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nadahlberg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nadahlberg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nadahlberg\/orgs","repos_url":"https:\/\/api.github.com\/users\/nadahlberg\/repos","events_url":"https:\/\/api.github.com\/users\/nadahlberg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nadahlberg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-25T00:16:53Z","updated_at":"2020-07-25T06:36:34Z","closed_at":"2020-07-25T06:36:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/434","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/434","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/434.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/434.patch","merged_at":"2020-07-25T06:36:34Z"},"body":"Fix check for pyarrow in __init__.py. Previously would raise an error for pyarrow >= 1.0.0","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/434\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/434\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/433","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/433\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/433\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/433\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/433","id":665311025,"node_id":"MDU6SXNzdWU2NjUzMTEwMjU=","number":433,"title":"How to reuse functionality of a (generic) 
dataset?","user":{"login":"ArneBinder","id":3375489,"node_id":"MDQ6VXNlcjMzNzU0ODk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3375489?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ArneBinder","html_url":"https:\/\/github.com\/ArneBinder","followers_url":"https:\/\/api.github.com\/users\/ArneBinder\/followers","following_url":"https:\/\/api.github.com\/users\/ArneBinder\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ArneBinder\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ArneBinder\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ArneBinder\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ArneBinder\/orgs","repos_url":"https:\/\/api.github.com\/users\/ArneBinder\/repos","events_url":"https:\/\/api.github.com\/users\/ArneBinder\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ArneBinder\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-07-24T17:27:37Z","updated_at":"2020-07-31T10:23:17Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I have written a generic dataset for corpora created with the Brat annotation tool ([specification](https:\/\/brat.nlplab.org\/standoff.html), [dataset code](https:\/\/github.com\/ArneBinder\/nlp\/blob\/brat\/datasets\/brat\/brat.py)). Now I wonder how to use that to create specific dataset instances. What's the recommended way to reuse formats and loading functionality for datasets with a common format?\r\n\r\nIn my case, it took a bit of time to create the Brat dataset and I think others would appreciate to not have to think about that again. Also, I assume there are other formats (e.g. 
conll) that are widely used, so having this would really ease dataset onboarding and adoption of the library.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/433\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/433\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/432","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/432\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/432\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/432\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/432","id":665234340,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU2MzQxNDk3","number":432,"title":"Fix handling of config files while loading datasets from multiple processes","user":{"login":"orsharir","id":99543,"node_id":"MDQ6VXNlcjk5NTQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/99543?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/orsharir","html_url":"https:\/\/github.com\/orsharir","followers_url":"https:\/\/api.github.com\/users\/orsharir\/followers","following_url":"https:\/\/api.github.com\/users\/orsharir\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/orsharir\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/orsharir\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/orsharir\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/orsharir\/orgs","repos_url":"https:\/\/api.github.com\/users\/orsharir\/repos","events_url":"https:\/\/api.github.com\/users\/orsharir\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/orsharir\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-07-24T15:10:57Z","updated_at":"2020-08-01T17:11:42Z","closed_at":"2020-07-30T08:25:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/432","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/432","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/432.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/432.patch","merged_at":"2020-07-30T08:25:28Z"},"body":"When loading shards on several processes, each process upon loading the dataset will overwrite dataset_infos.json in \/datasets\/\/\/dataset_infos.json. It does so every time, even when the target file already exists and is identical. Because multiple processes rewrite the same file in parallel, it creates a race condition when a process tries to load the file, often resulting in a JSON decoding exception because the file is only partially written.\r\n\r\nThis pull requests partially address this by comparing if the files are already identical before copying over the downloaded copy to the cached destination. 
There's still a race condition, but now it's less likely to occur if some basic precautions are taken by the library user, e.g., download all datasets to cache before spawning multiple processes.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/432\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/432\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/431","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/431\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/431\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/431\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/431","id":665044416,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU2MTgyNDE2","number":431,"title":"Specify split post processing + Add post processing resources downloading","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-07-24T09:29:19Z","updated_at":"2020-07-31T09:05:04Z","closed_at":"2020-07-31T09:05:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/431","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/431","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/431.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/431.patch","merged_at":"2020-07-31T09:05:03Z"},"body":"Previously if you tried to do\r\n\r\n```python\r\nfrom nlp import load_dataset\r\nwiki = load_dataset(\"wiki_dpr\", \"psgs_w100_with_nq_embeddings\", split=\"train[:100]\", with_index=True)\r\n```\r\nThen you'd get an error `Index size should match Dataset size...`\r\nThis was because it was trying to use the full index (21M elements).\r\n\r\nTo fix that I made it so post processing resources can be named according to the split.\r\n\r\nI'm going to add tests on post processing too.\r\n\r\nNote that the CI will fail as I added a new argument in `_post_processing_resources`: the AWS version of wiki_dpr fails, and there's also an error telling that it is not synced (it'll be synced once it's merged):\r\n```\r\n=========================== short test summary info 
============================\r\nFAILED tests\/test_dataset_common.py::AWSDatasetTest::test_load_dataset_wiki_dpr\r\nFAILED tests\/test_hf_gcp.py::TestDatasetSynced::test_script_synced_with_s3_wiki_dpr\r\n```\r\n\r\nEDIT: I did a change to ignore the script hash to locate the arrow files on GCS, so I removed the sync test. It was there just because of the hash logic for files on GCS","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/431\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/431\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/430","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/430\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/430\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/430\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/430","id":664583837,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU1ODAxOTI2","number":430,"title":"add DatasetDict","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-07-23T15:43:49Z","updated_at":"2020-08-04T01:01:53Z","closed_at":"2020-07-29T09:06:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/430","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/430","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/430.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/430.patch","merged_at":"2020-07-29T09:06:22Z"},"body":"## Add DatasetDict\r\n\r\n### Overview\r\n\r\nWhen you call `load_dataset` it can return a dictionary of datasets if there are several splits (train\/test for example).\r\nIf you wanted to apply dataset transforms you had to iterate over each split and apply the transform.\r\n\r\nInstead of returning a dict, it now returns a `nlp.DatasetDict` object which inherits from dict and contains the same data as before, except that now users can call dataset transforms directly from the output, and they'll be applied on each split.\r\n\r\nBefore:\r\n```python\r\nfrom nlp import load_dataset\r\n\r\nsquad = 
load_dataset(\"squad\")\r\nprint(squad.keys())\r\n# dict_keys(['train', 'validation'])\r\nsquad = {\r\n split_name: dataset.map(my_func) for split_name, dataset in squad.items()\r\n}\r\nprint(squad.keys())\r\n# dict_keys(['train', 'validation'])\r\n```\r\n\r\nNow:\r\n```python\r\nfrom nlp import load_dataset\r\n\r\nsquad = load_dataset(\"squad\")\r\nprint(squad.keys())\r\n# dict_keys(['train', 'validation'])\r\nsquad = squad.map(my_func)\r\nprint(squad.keys())\r\n# dict_keys(['train', 'validation'])\r\n```\r\n\r\n### Dataset transforms\r\n\r\n`nlp.DatasetDict` implements the following dataset transforms:\r\n- map\r\n- filter\r\n- sort\r\n- shuffle\r\n\r\n### Arguments\r\n\r\nThe arguments of the methods are the same except for split-specific arguments like `cache_file_name`.\r\nFor such arguments, the expected input is a dictionary `{split_name: argument_value}`\r\nIt concerns:\r\n- `cache_file_name` in map, filter, sort, shuffle\r\n- `seed` and `generator` in shuffle","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/430\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/430\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/429","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/429\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/429\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/429\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/429","id":664412137,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU1NjU2MDk5","number":429,"title":"mlsum","user":{"login":"RachelKer","id":36986299,"node_id":"MDQ6VXNlcjM2OTg2Mjk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36986299?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RachelKer","html_url":"https:\/\/github.com\/RachelKer","followers_url":"https:\/\/api.github.com\/users\/RachelKer\/followers","following_url":"https:\/\/api.github.com\/users\/RachelKer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RachelKer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RachelKer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RachelKer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RachelKer\/orgs","repos_url":"https:\/\/api.github.com\/users\/RachelKer\/repos","events_url":"https:\/\/api.github.com\/users\/RachelKer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RachelKer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-07-23T11:52:39Z","updated_at":"2020-07-31T11:46:20Z","closed_at":"2020-07-31T11:46:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/429","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/429","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/429.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/429.patch","merged_at":null},"body":"Hello, \r\n\r\nThe tests for the 
load_real_data fail: since there is no default language subset to download, they look for a file that does not exist. This bug does not happen when using the load_dataset function, as it asks you to specify a language if you do not, so I submit this PR anyway. The dataset is available at: https:\/\/gitlab.lip6.fr\/scialom\/mlsum_data","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/429\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/429\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/428","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/428\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/428\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/428\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/428","id":664367086,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU1NjE3Nzcy","number":428,"title":"fix concatenate_datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-23T10:30:59Z","updated_at":"2020-07-23T10:35:00Z","closed_at":"2020-07-23T10:34:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/428","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/428","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/428.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/428.patch","merged_at":"2020-07-23T10:34:58Z"},"body":"`concatenate_datasets` used to test that the different `nlp.Dataset.schema` match, but this attribute was removed in #423 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/428\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/428\/timeline","performed_via_github_app":null} 
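A minimal sketch of the behaviour discussed in #428 above, assuming today's `datasets` package (the renamed successor of `nlp`) and an illustrative `text` column: after #423, concatenation relies on the datasets' `features` rather than the removed `schema` attribute.

```python
# Minimal sketch (assumes the `datasets` package, the renamed successor of `nlp`;
# the toy `text` column and values are illustrative only).
from datasets import Dataset, concatenate_datasets

d1 = Dataset.from_dict({"text": ["a", "b"]})
d2 = Dataset.from_dict({"text": ["c", "d"]})

# Since #423, `features` (not the removed `schema`) are the source of truth,
# so this is what a concatenation check compares.
assert d1.features == d2.features
merged = concatenate_datasets([d1, d2])
print(len(merged))  # 4
```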
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/427","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/427\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/427\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/427\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/427","id":664341623,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU1NTk1Nzc3","number":427,"title":"Allow sequence features for beam + add processed Natural Questions","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-23T09:52:41Z","updated_at":"2020-07-23T13:09:30Z","closed_at":"2020-07-23T13:09:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/427","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/427","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/427.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/427.patch","merged_at":"2020-07-23T13:09:29Z"},"body":"## Allow Sequence features for Beam Datasets + add Natural Questions\r\n\r\n### The issue\r\n\r\nThe steps of beam datasets processing is the following:\r\n- download the source files and send them in a remote storage (gcs)\r\n- process the files using a beam runner (dataflow)\r\n- save output in remote storage (gcs)\r\n- convert output to arrow in remote storage (gcs)\r\n\r\nHowever it wasn't possible to process `natural_questions` because apache beam's processing outputs parquet files, and it's not yet possible to read parquet files with list features.\r\n\r\n### The proposed solution\r\n\r\nTo allow sequence features for beam I added a workaround that serializes the values using `json.dumps`, so that we end up with strings instead of the original features. Then when the arrow file is created, the serialized objects are transformed back to normal with `json.loads`. 
Not sure if there's a better way to do it.\r\n\r\n### Natural Questions\r\n\r\nI was able to process NQ with it, and so I added the json infos file in this PR too.\r\nThe processed arrow files are also stored in gcs.\r\nIt allows you to load NQ with\r\n\r\n```python\r\nfrom nlp import load_dataset\r\nnq = load_dataset(\"natural_questions\") # download the 90GB arrow files from gcs and return the dataset\r\n```\r\n\r\n### Tests\r\n\r\nI added a test case to make sure it works as expected.\r\nNote that the CI will fail because I am updating `natural_questions.py`: it's not synced with the script on S3. It will be synced as soon as this PR is merged.\r\n```\r\n=========================== short test summary info ============================\r\nFAILED tests\/test_hf_gcp.py::TestDatasetOnHfGcp::test_script_synced_with_s3_natural_questions\/default\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/427\/reactions","total_count":4,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":3,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/427\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/426","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/426\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/426\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/426\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/426","id":664203897,"node_id":"MDU6SXNzdWU2NjQyMDM4OTc=","number":426,"title":"[FEATURE REQUEST] Multiprocessing with for dataset.map, dataset.filter","user":{"login":"timothyjlaurent","id":2000204,"node_id":"MDQ6VXNlcjIwMDAyMDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2000204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timothyjlaurent","html_url":"https:\/\/github.com\/timothyjlaurent","followers_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/followers","following_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/orgs","repos_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/repos","events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-07-23T05:00:41Z","updated_at":"2021-03-12T09:34:12Z","closed_at":"2020-09-07T14:48:04Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It would be nice to be able to speed up `dataset.map` or `dataset.filter`. 
Perhaps this is as easy as sharding the dataset sending each shard to a process\/thread\/dask pool and using the new `nlp.concatenate_dataset()` function to join them all together?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/426\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/426\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/425","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/425\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/425\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/425\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/425","id":664029848,"node_id":"MDU6SXNzdWU2NjQwMjk4NDg=","number":425,"title":"Correct data structure for PAN-X task in XTREME dataset?","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-07-22T20:29:20Z","updated_at":"2020-08-02T13:30:34Z","closed_at":"2020-08-02T13:30:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi \ud83e\udd17 team!\r\n\r\n## Description of the problem\r\nThanks to the fix from #416 I am now able to load the NER task in the XTREME dataset as follows:\r\n\r\n```python\r\nfrom nlp import load_dataset\r\n# AmazonPhotos.zip is located in data\/\r\ndataset = load_dataset(\"xtreme\", \"PAN-X.en\", data_dir='.\/data')\r\ndataset_train = dataset['train']\r\n```\r\n\r\nHowever, I am not sure that `load_dataset()` is returning the correct data structure for NER. \r\n\r\nCurrently, every row in `dataset_train` is of the form\r\n```python\r\n{'word': str, 'ner_tag': str, 'lang': str}\r\n```\r\nbut I think we actually want something like\r\n```python\r\n{'words': List[str], 'ner_tags': List[str], 'langs': List[str]}\r\n```\r\nso that each row corresponds to a _sequence_ of words associated with each example. 
With the current data structure I do not think it is possible to transform `dataset_train` into a form suitable for training because we do not know the boundaries between examples.\r\n\r\nIndeed, [this line](https:\/\/github.com\/google-research\/xtreme\/blob\/522434d1aece34131d997a97ce7e9242a51a688a\/third_party\/utils_tag.py#L58) in the XTREME repo, processes the texts as lists of sentences, tags, and languages.\r\n\r\n## Proposed solution\r\nReplace\r\n```python\r\nwith open(filepath) as f:\r\n data = csv.reader(f, delimiter=\"\\t\", quoting=csv.QUOTE_NONE)\r\n for id_, row in enumerate(data):\r\n if row:\r\n lang, word = row[0].split(\":\")[0], row[0].split(\":\")[1]\r\n tag = row[1]\r\n yield id_, {\"word\": word, \"ner_tag\": tag, \"lang\": lang}\r\n```\r\nfrom [these lines](https:\/\/github.com\/huggingface\/nlp\/blob\/ce7d3a1d630b78fe27188d1706f3ea980e8eec43\/datasets\/xtreme\/xtreme.py#L881-L887) of the `_generate_examples()` function with something like\r\n\r\n```python\r\nguid_index = 1\r\nwith open(filepath, encoding=\"utf-8\") as f:\r\n words = []\r\n ner_tags = []\r\n langs = []\r\n for line in f:\r\n if line.startswith(\"-DOCSTART-\") or line == \"\" or line == \"\\n\":\r\n if words:\r\n yield guid_index, {\"words\": words, \"ner_tags\": ner_tags, \"langs\": langs}\r\n guid_index += 1\r\n words = []\r\n ner_tags = []\r\n else:\r\n # pan-x data is tab separated\r\n splits = line.split(\"\\t\")\r\n # strip out en: prefix\r\n langs.append(splits[0][:2])\r\n words.append(splits[0][3:])\r\n if len(splits) > 1:\r\n labels.append(splits[-1].replace(\"\\n\", \"\"))\r\n else:\r\n # examples have no label in test set\r\n labels.append(\"O\")\r\n```\r\nIf you agree, me or @lvwerra would be happy to implement this and create a PR.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/425\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/425\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/424","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/424\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/424\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/424\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/424","id":663858552,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU1MTk4MTY0","number":424,"title":"Web of 
science","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-22T15:38:31Z","updated_at":"2020-07-23T14:27:58Z","closed_at":"2020-07-23T14:27:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/424","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/424","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/424.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/424.patch","merged_at":"2020-07-23T14:27:56Z"},"body":"this PR adds the WebofScience dataset\r\n#353 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/424\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/424\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/423","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/423\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/423\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/423\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/423","id":663079359,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU0NTU4OTA0","number":423,"title":"Change features vs schema 
logic","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-21T14:52:47Z","updated_at":"2020-07-25T09:08:34Z","closed_at":"2020-07-23T10:15:17Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/423","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/423","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/423.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/423.patch","merged_at":"2020-07-23T10:15:16Z"},"body":"## New logic for `nlp.Features` in datasets\r\n\r\nPreviously, it was confusing to have `features` and pyarrow's `schema` in `nlp.Dataset`.\r\nHowever `features` is supposed to be the front-facing object to define the different fields of a dataset, while `schema` is only used to write arrow files.\r\n\r\nChanges:\r\n- Remove `schema` field in `nlp.Dataset`\r\n- Make `features` the source of truth to read\/write examples\r\n- `features` can no longer be `None` in `nlp.Dataset`\r\n- Update `features` after each dataset transform such as `nlp.Dataset.map`\r\n\r\nTodo: change the tests to take these changes into account","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/423\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/423\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/422","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/422\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/422\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/422\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/422","id":663028497,"node_id":"MDExOlB1bGxSZXF1ZXN0NDU0NTE3MDU2","number":422,"title":"- Corrected encoding for 
IMDB.","user":{"login":"ghazi-f","id":25091538,"node_id":"MDQ6VXNlcjI1MDkxNTM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25091538?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghazi-f","html_url":"https:\/\/github.com\/ghazi-f","followers_url":"https:\/\/api.github.com\/users\/ghazi-f\/followers","following_url":"https:\/\/api.github.com\/users\/ghazi-f\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghazi-f\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghazi-f\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghazi-f\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghazi-f\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghazi-f\/repos","events_url":"https:\/\/api.github.com\/users\/ghazi-f\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghazi-f\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-21T13:46:59Z","updated_at":"2020-07-22T16:02:53Z","closed_at":"2020-07-22T16:02:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/422","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/422","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/422.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/422.patch","merged_at":"2020-07-22T16:02:53Z"},"body":"The preparation phase (after the download phase) crashed on windows because of charmap encoding not being able to decode certain characters. This change suggested in Issue #347 fixes it for the IMDB dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/422\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/422\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/421","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/421\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/421\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/421\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/421","id":662213864,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUzNzkzMzQ1","number":421,"title":"Style 
change","user":{"login":"lordtt13","id":35500534,"node_id":"MDQ6VXNlcjM1NTAwNTM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35500534?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lordtt13","html_url":"https:\/\/github.com\/lordtt13","followers_url":"https:\/\/api.github.com\/users\/lordtt13\/followers","following_url":"https:\/\/api.github.com\/users\/lordtt13\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lordtt13\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lordtt13\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lordtt13\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lordtt13\/orgs","repos_url":"https:\/\/api.github.com\/users\/lordtt13\/repos","events_url":"https:\/\/api.github.com\/users\/lordtt13\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lordtt13\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-07-20T20:08:29Z","updated_at":"2020-07-22T16:08:40Z","closed_at":"2020-07-22T16:08:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/421","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/421","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/421.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/421.patch","merged_at":null},"body":"make quality and make style ran on scripts","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/421\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/421\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/420","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/420\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/420\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/420\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/420","id":662029782,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUzNjI5OTk2","number":420,"title":"Better handle nested 
features","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-20T16:44:13Z","updated_at":"2020-07-21T08:20:49Z","closed_at":"2020-07-21T08:09:52Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/420","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/420","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/420.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/420.patch","merged_at":"2020-07-21T08:09:51Z"},"body":"Changes:\r\n- added arrow schema to features conversion (it's going to be useful to fix #342 )\r\n- make flatten handle deep features (useful for tfrecords conversion in #339 )\r\n- add tests for flatten and features conversions\r\n- the reader now returns the kwargs to instantiate a Dataset (fix circular dependencies)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/420\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/420\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/419","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/419\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/419\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/419\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/419","id":661974747,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUzNTgxNzQz","number":419,"title":"EmoContext dataset 
add","user":{"login":"lordtt13","id":35500534,"node_id":"MDQ6VXNlcjM1NTAwNTM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35500534?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lordtt13","html_url":"https:\/\/github.com\/lordtt13","followers_url":"https:\/\/api.github.com\/users\/lordtt13\/followers","following_url":"https:\/\/api.github.com\/users\/lordtt13\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lordtt13\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lordtt13\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lordtt13\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lordtt13\/orgs","repos_url":"https:\/\/api.github.com\/users\/lordtt13\/repos","events_url":"https:\/\/api.github.com\/users\/lordtt13\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lordtt13\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-20T15:48:45Z","updated_at":"2020-07-24T08:22:01Z","closed_at":"2020-07-24T08:22:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/419","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/419","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/419.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/419.patch","merged_at":"2020-07-24T08:22:00Z"},"body":"EmoContext Dataset add\r\n\r\nSigned-off-by: lordtt13 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/419\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/419\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/418","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/418\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/418\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/418\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/418","id":661914873,"node_id":"MDU6SXNzdWU2NjE5MTQ4NzM=","number":418,"title":"Addition of google drive links to 
dl_manager","user":{"login":"lordtt13","id":35500534,"node_id":"MDQ6VXNlcjM1NTAwNTM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35500534?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lordtt13","html_url":"https:\/\/github.com\/lordtt13","followers_url":"https:\/\/api.github.com\/users\/lordtt13\/followers","following_url":"https:\/\/api.github.com\/users\/lordtt13\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lordtt13\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lordtt13\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lordtt13\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lordtt13\/orgs","repos_url":"https:\/\/api.github.com\/users\/lordtt13\/repos","events_url":"https:\/\/api.github.com\/users\/lordtt13\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lordtt13\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-07-20T14:52:02Z","updated_at":"2020-07-20T15:39:32Z","closed_at":"2020-07-20T15:39:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello there, I followed the template to create a download script of my own, which works fine for me, although I had to shun the dl_manager because it was downloading nothing from the drive links and instead use gdown.\r\n\r\nThis is the script for me:\r\n\r\n```python\r\nclass EmoConfig(nlp.BuilderConfig):\r\n \"\"\"BuilderConfig for SQUAD.\"\"\"\r\n\r\n def __init__(self, **kwargs):\r\n \"\"\"BuilderConfig for EmoContext.\r\n Args:\r\n **kwargs: keyword arguments forwarded to super.\r\n \"\"\"\r\n super(EmoConfig, self).__init__(**kwargs)\r\n\r\n_TEST_URL = \"https:\/\/drive.google.com\/file\/d\/1Hn5ytHSSoGOC4sjm3wYy0Dh0oY_oXBbb\/view?usp=sharing\"\r\n_TRAIN_URL = \"https:\/\/drive.google.com\/file\/d\/12Uz59TYg_NtxOy7SXraYeXPMRT7oaO7X\/view?usp=sharing\"\r\n\r\nclass EmoDataset(nlp.GeneratorBasedBuilder):\r\n \"\"\" SemEval-2019 Task 3: EmoContext Contextual Emotion Detection in Text. 
Version 1.0.0 \"\"\"\r\n\r\n VERSION = nlp.Version(\"1.0.0\")\r\n force = False\r\n\r\n def _info(self):\r\n return nlp.DatasetInfo(\r\n description=_DESCRIPTION,\r\n features=nlp.Features(\r\n {\r\n \"text\": nlp.Value(\"string\"),\r\n \"label\": nlp.features.ClassLabel(names=[\"others\", \"happy\", \"sad\", \"angry\"]),\r\n }\r\n ),\r\n supervised_keys=None,\r\n homepage=\"https:\/\/www.aclweb.org\/anthology\/S19-2005\/\",\r\n citation=_CITATION,\r\n )\r\n \r\n def _get_drive_url(self, url):\r\n base_url = 'https:\/\/drive.google.com\/uc?id='\r\n split_url = url.split('\/')\r\n return base_url + split_url[5]\r\n \r\n def _split_generators(self, dl_manager):\r\n \"\"\"Returns SplitGenerators.\"\"\"\r\n if(not os.path.exists(\"emo-train.json\") or self.force):\r\n gdown.download(self._get_drive_url(_TRAIN_URL), \"emo-train.json\", quiet = True)\r\n if(not os.path.exists(\"emo-test.json\") or self.force):\r\n gdown.download(self._get_drive_url(_TEST_URL), \"emo-test.json\", quiet = True)\r\n return [\r\n nlp.SplitGenerator(\r\n name=nlp.Split.TRAIN,\r\n gen_kwargs={\r\n \"filepath\": \"emo-train.json\",\r\n \"split\": \"train\",\r\n },\r\n ),\r\n nlp.SplitGenerator(\r\n name=nlp.Split.TEST,\r\n gen_kwargs={\"filepath\": \"emo-test.json\", \"split\": \"test\"},\r\n ),\r\n ]\r\n\r\n def _generate_examples(self, filepath, split):\r\n \"\"\" Yields examples. \"\"\"\r\n with open(filepath, 'rb') as f:\r\n data = json.load(f)\r\n for id_, text, label in zip(data[\"text\"].keys(), data[\"text\"].values(), data[\"Label\"].values()):\r\n yield id_, {\r\n \"text\": text,\r\n \"label\": label,\r\n }\r\n```\r\n\r\nCan someone help me in adding gdrive links to be used with default dl_manager or adding gdown as another dl_manager, because I'd like to add this dataset to nlp's official database.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/418\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/418\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/417","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/417\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/417\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/417\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/417","id":661804054,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUzNDMyODE5","number":417,"title":"Fix docstrins multiple metrics 
instances","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-20T13:08:59Z","updated_at":"2020-07-22T09:51:00Z","closed_at":"2020-07-22T09:50:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/417","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/417","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/417.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/417.patch","merged_at":"2020-07-22T09:50:58Z"},"body":"We change the docstrings of `nlp.Metric.compute`, `nlp.Metric.add` and `nlp.Metric.add_batch` depending on which metric is instantiated. However we had issues when instantiating multiple metrics (docstrings were duplicated).\r\n\r\nThis should fix #304 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/417\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/417\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/416","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/416\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/416\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/416\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/416","id":661635393,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUzMjg1NTM4","number":416,"title":"Fix xtreme panx 
directory","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-20T10:09:17Z","updated_at":"2020-07-21T08:15:46Z","closed_at":"2020-07-21T08:15:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/416","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/416","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/416.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/416.patch","merged_at":"2020-07-21T08:15:44Z"},"body":"Fix #412 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/416\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/416\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/415","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/415\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/415\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/415\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/415","id":660687076,"node_id":"MDU6SXNzdWU2NjA2ODcwNzY=","number":415,"title":"Something is wrong with WMT 19 kk-en 
dataset","user":{"login":"ChenghaoMou","id":32014649,"node_id":"MDQ6VXNlcjMyMDE0NjQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32014649?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ChenghaoMou","html_url":"https:\/\/github.com\/ChenghaoMou","followers_url":"https:\/\/api.github.com\/users\/ChenghaoMou\/followers","following_url":"https:\/\/api.github.com\/users\/ChenghaoMou\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ChenghaoMou\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ChenghaoMou\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ChenghaoMou\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ChenghaoMou\/orgs","repos_url":"https:\/\/api.github.com\/users\/ChenghaoMou\/repos","events_url":"https:\/\/api.github.com\/users\/ChenghaoMou\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ChenghaoMou\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-19T08:18:51Z","updated_at":"2020-07-20T09:54:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The translation in the `train` set does not look right:\r\n\r\n```\r\n>>>import nlp\r\n>>>from nlp import load_dataset\r\n>>>dataset = load_dataset('wmt19', 'kk-en')\r\n>>>dataset[\"train\"][\"translation\"][0]\r\n{'kk': 'Trumpian Uncertainty', 'en': '\u0422\u0440\u0430\u043c\u043f\u0442\u044b\u049b \u0431\u0435\u043b\u0433\u0456\u0441\u0456\u0437\u0434\u0456\u043a'}\r\n>>>dataset[\"validation\"][\"translation\"][0]\r\n{'kk': '\u0410\u049b\u0448\u0430-\u043d\u0435\u0441\u0438\u0435 \u0441\u0430\u044f\u0441\u0430\u0442\u044b\u043d\u044b\u04a3 \u0441\u0446\u0435\u043d\u0430\u0440\u0438\u0439\u0456\u043d \u049b\u0430\u0439\u0442\u0430 \u0436\u0430\u0437\u0441\u0430\u049b', 'en': 'Rewriting the Monetary-Policy Script'}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/415\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":1,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/415\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/414","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/414\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/414\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/414\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/414","id":660654013,"node_id":"MDU6SXNzdWU2NjA2NTQwMTM=","number":414,"title":"from_dict 
delete?","user":{"login":"hackerxiaobai","id":22817243,"node_id":"MDQ6VXNlcjIyODE3MjQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22817243?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hackerxiaobai","html_url":"https:\/\/github.com\/hackerxiaobai","followers_url":"https:\/\/api.github.com\/users\/hackerxiaobai\/followers","following_url":"https:\/\/api.github.com\/users\/hackerxiaobai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hackerxiaobai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hackerxiaobai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hackerxiaobai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hackerxiaobai\/orgs","repos_url":"https:\/\/api.github.com\/users\/hackerxiaobai\/repos","events_url":"https:\/\/api.github.com\/users\/hackerxiaobai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hackerxiaobai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-19T07:08:36Z","updated_at":"2020-07-21T02:21:17Z","closed_at":"2020-07-21T02:21:17Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"AttributeError: type object 'Dataset' has no attribute 'from_dict'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/414\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/414\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/413","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/413\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/413\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/413\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/413","id":660063655,"node_id":"MDU6SXNzdWU2NjAwNjM2NTU=","number":413,"title":"Is there a way to download only NQ 
dev?","user":{"login":"tholor","id":1563902,"node_id":"MDQ6VXNlcjE1NjM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1563902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tholor","html_url":"https:\/\/github.com\/tholor","followers_url":"https:\/\/api.github.com\/users\/tholor\/followers","following_url":"https:\/\/api.github.com\/users\/tholor\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tholor\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tholor\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tholor\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tholor\/orgs","repos_url":"https:\/\/api.github.com\/users\/tholor\/repos","events_url":"https:\/\/api.github.com\/users\/tholor\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tholor\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-07-18T10:28:23Z","updated_at":"2022-02-11T09:50:21Z","closed_at":"2022-02-11T09:50:21Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Maybe I missed that in the docs, but is there a way to only download the dev set of natural questions (~1 GB)? \r\nAs we want to benchmark QA models on different datasets, I would like to avoid downloading the 41GB of training data. \r\n\r\nI tried\r\n```\r\ndataset = nlp.load_dataset('natural_questions', split=\"validation\", beam_runner=\"DirectRunner\")\r\n```\r\nBut this still triggered a big download of presumably the whole dataset. Is there any way of doing this or are splits \/ slicing options only available after downloading?\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/413\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/413\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/412","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/412\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/412\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/412\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/412","id":660047139,"node_id":"MDU6SXNzdWU2NjAwNDcxMzk=","number":412,"title":"Unable to load XTREME dataset from 
disk","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-07-18T09:55:00Z","updated_at":"2020-07-21T08:15:44Z","closed_at":"2020-07-21T08:15:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi \ud83e\udd17 team!\r\n\r\n## Description of the problem\r\nFollowing the [docs](https:\/\/huggingface.co\/nlp\/loading_datasets.html?highlight=xtreme#manually-downloading-files) I'm trying to load the `PAN-X.fr` dataset from the [XTREME](https:\/\/github.com\/google-research\/xtreme) benchmark.\r\n\r\nI have manually downloaded the `AmazonPhotos.zip` file from [here](https:\/\/www.amazon.com\/clouddrive\/share\/d3KGCRCIYwhKJF0H3eWA26hjg2ZCRhjpEQtDL70FSBN?_encoding=UTF8&%2AVersion%2A=1&%2Aentries%2A=0&mgh=1) and am running into a `FileNotFoundError` when I point to the location of the dataset.\r\n\r\nAs far as I can tell, the problem is that `AmazonPhotos.zip` decompresses to `panx_dataset` and `load_dataset()` is not looking in the correct path:\r\n\r\n```\r\n# path where load_dataset is looking for fr.tar.gz\r\n\/root\/.cache\/huggingface\/datasets\/9b8c4f1578e45cb2539332c79738beb3b54afbcd842b079cabfd79e3ed6704f6\/\r\n# path where it actually exists\r\n\/root\/.cache\/huggingface\/datasets\/9b8c4f1578e45cb2539332c79738beb3b54afbcd842b079cabfd79e3ed6704f6\/panx_dataset\/\r\n```\r\n\r\n## Steps to reproduce the problem\r\n\r\n1. Manually download the XTREME benchmark from [here](https:\/\/www.amazon.com\/clouddrive\/share\/d3KGCRCIYwhKJF0H3eWA26hjg2ZCRhjpEQtDL70FSBN?_encoding=UTF8&%2AVersion%2A=1&%2Aentries%2A=0&mgh=1)\r\n\r\n2. Run the following code snippet\r\n```python\r\nfrom nlp import load_dataset\r\n# AmazonPhotos.zip is in the root of the folder\r\ndataset = load_dataset(\"xtreme\", \"PAN-X.fr\", data_dir='.\/')\r\n```\r\n\r\n3. 
Here is the stack trace\r\n```\r\n---------------------------------------------------------------------------\r\nFileNotFoundError Traceback (most recent call last)\r\n in \r\n----> 1 dataset = load_dataset(\"xtreme\", \"PAN-X.fr\", data_dir='.\/')\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n 522 download_mode=download_mode,\r\n 523 ignore_verifications=ignore_verifications,\r\n--> 524 save_infos=save_infos,\r\n 525 )\r\n 526 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n 430 verify_infos = not save_infos and not ignore_verifications\r\n 431 self._download_and_prepare(\r\n--> 432 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 433 )\r\n 434 # Sync info\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 464 split_dict = SplitDict(dataset_name=self.name)\r\n 465 split_generators_kwargs = self._make_split_generators_kwargs(prepare_split_kwargs)\r\n--> 466 split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n 467 # Checksums verification\r\n 468 if verify_infos:\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/datasets\/xtreme\/b8c2ed3583a7a7ac60b503576dfed3271ac86757628897e945bd329c43b8a746\/xtreme.py in _split_generators(self, dl_manager)\r\n 725 panx_dl_dir = dl_manager.extract(panx_path)\r\n 726 lang = self.config.name.split(\".\")[1]\r\n--> 727 lang_folder = dl_manager.extract(os.path.join(panx_dl_dir, lang + \".tar.gz\"))\r\n 728 return [\r\n 729 nlp.SplitGenerator(\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/utils\/download_manager.py in extract(self, path_or_paths)\r\n 196 \"\"\"\r\n 197 return map_nested(\r\n--> 198 lambda path: cached_path(path, extract_compressed_file=True, force_extract=False), path_or_paths,\r\n 199 )\r\n 200 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/utils\/py_utils.py in map_nested(function, data_struct, dict_only, map_tuple)\r\n 170 return tuple(mapped)\r\n 171 # Singleton\r\n--> 172 return function(data_struct)\r\n 173 \r\n 174 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/utils\/download_manager.py in (path)\r\n 196 \"\"\"\r\n 197 return map_nested(\r\n--> 198 lambda path: cached_path(path, extract_compressed_file=True, force_extract=False), path_or_paths,\r\n 199 )\r\n 200 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/utils\/file_utils.py in cached_path(url_or_filename, download_config, **download_kwargs)\r\n 203 elif urlparse(url_or_filename).scheme == \"\":\r\n 204 # File, but it doesn't exist.\r\n--> 205 raise FileNotFoundError(\"Local file {} doesn't exist\".format(url_or_filename))\r\n 206 else:\r\n 207 # Something unknown\r\n\r\nFileNotFoundError: Local file \/root\/.cache\/huggingface\/datasets\/9b8c4f1578e45cb2539332c79738beb3b54afbcd842b079cabfd79e3ed6704f6\/fr.tar.gz doesn't exist\r\n```\r\n\r\n## OS and hardware\r\n```\r\n- `nlp` version: 0.3.0\r\n- Platform: Linux-4.15.0-72-generic-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.6.9\r\n- PyTorch version (GPU?): 1.4.0 (True)\r\n- Tensorflow version (GPU?): 2.1.0 (True)\r\n- Using GPU in script?: \r\n- Using 
distributed or parallel set-up in script?: \r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/412\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/412\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/411","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/411\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/411\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/411\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/411","id":659393398,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUxMjQxOTQy","number":411,"title":"Sbf","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-17T16:19:45Z","updated_at":"2020-07-21T09:13:46Z","closed_at":"2020-07-21T09:13:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/411","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/411","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/411.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/411.patch","merged_at":"2020-07-21T09:13:45Z"},"body":"This PR adds the Social Bias Frames Dataset (ACL 2020) .\r\ndataset homepage: https:\/\/homes.cs.washington.edu\/~msap\/social-bias-frames\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/411\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/411\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/410","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/410\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/410\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/410\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/410","id":659242871,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUxMTEzMTI3","number":410,"title":"20newsgroup","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-17T13:07:57Z","updated_at":"2020-07-20T07:05:29Z","closed_at":"2020-07-20T07:05:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/410","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/410","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/410.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/410.patch","merged_at":"2020-07-20T07:05:28Z"},"body":"Add 20Newsgroup dataset.\r\n#353 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/410\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/410\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/409","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/409\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/409\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/409\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/409","id":659128611,"node_id":"MDU6SXNzdWU2NTkxMjg2MTE=","number":409,"title":"train_test_split error: 'dict' object has no attribute 
'deepcopy'","user":{"login":"morganmcg1","id":20516801,"node_id":"MDQ6VXNlcjIwNTE2ODAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20516801?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/morganmcg1","html_url":"https:\/\/github.com\/morganmcg1","followers_url":"https:\/\/api.github.com\/users\/morganmcg1\/followers","following_url":"https:\/\/api.github.com\/users\/morganmcg1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/morganmcg1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/morganmcg1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/morganmcg1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/morganmcg1\/orgs","repos_url":"https:\/\/api.github.com\/users\/morganmcg1\/repos","events_url":"https:\/\/api.github.com\/users\/morganmcg1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/morganmcg1\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2020-07-17T10:36:28Z","updated_at":"2020-07-21T14:34:52Z","closed_at":"2020-07-21T14:34:52Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`train_test_split` is giving me an error when I try and call it:\r\n\r\n`'dict' object has no attribute 'deepcopy'`\r\n\r\n## To reproduce\r\n\r\n```\r\ndataset = load_dataset('glue', 'mrpc', split='train')\r\ndataset = dataset.train_test_split(test_size=0.2)\r\n```\r\n\r\n## Full 
Stacktrace\r\n```\r\n---------------------------------------------------------------------------\r\nAttributeError Traceback (most recent call last)\r\n in \r\n 1 dataset = load_dataset('glue', 'mrpc', split='train')\r\n----> 2 dataset = dataset.train_test_split(test_size=0.2)\r\n\r\n~\/anaconda3\/envs\/fastai2_me\/lib\/python3.7\/site-packages\/nlp\/arrow_dataset.py in train_test_split(self, test_size, train_size, shuffle, seed, generator, keep_in_memory, load_from_cache_file, train_cache_file_name, test_cache_file_name, writer_batch_size)\r\n 1032 \"writer_batch_size\": writer_batch_size,\r\n 1033 }\r\n-> 1034 train_kwargs = cache_kwargs.deepcopy()\r\n 1035 train_kwargs[\"split\"] = \"train\"\r\n 1036 test_kwargs = cache_kwargs.deepcopy()\r\n\r\nAttributeError: 'dict' object has no attribute 'deepcopy'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/409\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/409\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/408","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/408\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/408\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/408\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/408","id":659064144,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUwOTU1MTE0","number":408,"title":"Add tests datasets gcp","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-17T09:23:27Z","updated_at":"2020-07-17T09:26:57Z","closed_at":"2020-07-17T09:26:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/408","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/408","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/408.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/408.patch","merged_at":"2020-07-17T09:26:56Z"},"body":"Some datasets are available on our google cloud storage in arrow format, so that the users don't need to process the data.\r\nThese tests make sure that they're always available. 
It also makes sure that their scripts are in sync between S3 and the repo.\r\nThis should avoid future issues like #407 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/408\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/408\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/407","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/407\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/407\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/407\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/407","id":658672736,"node_id":"MDU6SXNzdWU2NTg2NzI3MzY=","number":407,"title":"MissingBeamOptions for Wikipedia 20200501.en","user":{"login":"mitchellgordon95","id":7490438,"node_id":"MDQ6VXNlcjc0OTA0Mzg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7490438?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mitchellgordon95","html_url":"https:\/\/github.com\/mitchellgordon95","followers_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/followers","following_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/orgs","repos_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/repos","events_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/o
ther_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2020-07-16T23:48:03Z","updated_at":"2021-01-12T11:41:16Z","closed_at":"2020-07-17T14:24:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"There may or may not be a regression for the pre-processed Wikipedia dataset. This was working fine 10 commits ago (without having Apache Beam available):\r\n\r\n```\r\nnlp.load_dataset('wikipedia', \"20200501.en\", split='train')\r\n```\r\n\r\nAnd now, having pulled master, I get:\r\n\r\n```\r\nDownloading and preparing dataset wikipedia\/20200501.en (download: 16.99 GiB, generated: 17.07 GiB, total: 34.06 GiB) to \/home\/hltcoe\/mgordon\/.cache\/huggingface\/datasets\/wikipedia\/20200501.en\/1.0.0\/76b0b2747b679bb0ee7a1621e50e5a6378477add0c662668a324a5bc07d516dd...\r\nTraceback (most recent call last):\r\n File \"scripts\/download.py\", line 11, in \r\n fire.Fire(download_pretrain)\r\n File \"\/home\/hltcoe\/mgordon\/.conda\/envs\/huggingface\/lib\/python3.6\/site-packages\/fire\/core.py\", line 138, in Fire\r\n component_trace = _Fire(component, args, parsed_flag_args, context, name)\r\n File \"\/home\/hltcoe\/mgordon\/.conda\/envs\/huggingface\/lib\/python3.6\/site-packages\/fire\/core.py\", line 468, in _Fire\r\n target=component.__name__)\r\n File \"\/home\/hltcoe\/mgordon\/.conda\/envs\/huggingface\/lib\/python3.6\/site-packages\/fire\/core.py\", line 672, in _CallAndUpdateTrace\r\n component = fn(*varargs, **kwargs)\r\n File \"scripts\/download.py\", line 6, in download_pretrain\r\n nlp.load_dataset('wikipedia', \"20200501.en\", split='train')\r\n File \"\/exp\/mgordon\/nlp\/src\/nlp\/load.py\", line 534, in load_dataset\r\n save_infos=save_infos,\r\n File \"\/exp\/mgordon\/nlp\/src\/nlp\/builder.py\", line 460, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/exp\/mgordon\/nlp\/src\/nlp\/builder.py\", line 870, in _download_and_prepare\r\n \"\\n\\t`{}`\".format(usage_example)\r\nnlp.builder.MissingBeamOptions: Trying to generate a dataset using Apache Beam, yet no Beam Runner or PipelineOptions() has been provided in `load_dataset` or in the builder arguments. For big datasets it has to run on large-scale data processing tools like Dataflow, S\r\npark, etc. 
More information about Apache Beam runners at https:\/\/beam.apache.org\/documentation\/runners\/capability-matrix\/\r\nIf you really want to run it locally because you feel like the Dataset is small enough, you can use the local beam runner called `DirectRunner` (you may run out of memory).\r\nExample of usage:\r\n `load_dataset('wikipedia', '20200501.en', beam_runner='DirectRunner')`\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/407\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/407\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/406","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/406\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/406\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/406\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/406","id":658581764,"node_id":"MDU6SXNzdWU2NTg1ODE3NjQ=","number":406,"title":"Faster Shuffling?","user":{"login":"mitchellgordon95","id":7490438,"node_id":"MDQ6VXNlcjc0OTA0Mzg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7490438?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mitchellgordon95","html_url":"https:\/\/github.com\/mitchellgordon95","followers_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/followers","following_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/orgs","repos_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/repos","events_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-07-16T21:21:53Z","updated_at":"2020-09-07T14:45:26Z","closed_at":"2020-09-07T14:45:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Consider shuffling bookcorpus:\r\n\r\n```\r\ndataset = nlp.load_dataset('bookcorpus', split='train')\r\ndataset.shuffle()\r\n```\r\nAccording to tqdm, this will take around 2.5 hours on my machine to complete (even with the faster version of select from #405). I've also tried with `keep_in_memory=True` and `writer_batch_size=1000`.\r\n\r\nBut I can also just write the lines to a text file:\r\n\r\n```\r\nbatch_size = 100000\r\nwith open('tmp.txt', 'w+') as out_f:\r\n for i in tqdm(range(0, len(dataset), batch_size)):\r\n batch = dataset[i:i+batch_size]['text']\r\n print(\"\\n\".join(batch), file=out_f)\r\n```\r\n\r\nWhich completes in a couple minutes, followed by `shuf tmp.txt > tmp2.txt` which completes in under a minute. 
And finally,\r\n\r\n```\r\ndataset = nlp.load_dataset('text', data_files='tmp2.txt')\r\n```\r\n\r\nWhich completes in under 10 minutes. I read up on Apache Arrow this morning, and it seems like the columnar data format is not especially well-suited to shuffling rows, since moving items around requires a lot of book-keeping. \r\n\r\nIs shuffle inherently slow, or am I just using it wrong? And if it is slow, would it make sense to try converting the data to a row-based format on disk and then shuffling? (Instead of calling select with a random permutation, as is currently done.)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/406\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/406\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/405","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/405\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/405\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/405\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/405","id":658580192,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUwNTI1MTc3","number":405,"title":"Make select() faster by batching reads","user":{"login":"mitchellgordon95","id":7490438,"node_id":"MDQ6VXNlcjc0OTA0Mzg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7490438?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mitchellgordon95","html_url":"https:\/\/github.com\/mitchellgordon95","followers_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/followers","following_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/orgs","repos_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/repos","events_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-16T21:19:45Z","updated_at":"2020-07-17T17:05:44Z","closed_at":"2020-07-17T16:51:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/405","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/405","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/405.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/405.patch","merged_at":"2020-07-17T16:51:26Z"},"body":"Here's a benchmark:\r\n\r\n```\r\ndataset = nlp.load_dataset('bookcorpus', split='train')\r\n\r\nstart = time.time()\r\ndataset.select(np.arange(1000), reader_batch_size=1, load_from_cache_file=False)\r\nend = time.time()\r\nprint(f'{end - start}')\r\n\r\nstart = time.time()\r\ndataset.select(np.arange(1000), 
reader_batch_size=1000, load_from_cache_file=False)\r\nend = time.time()\r\nprint(f'{end - start}')\r\n```\r\n\r\nWithout batching, select takes around 1.27 seconds. With batching, it takes around 0.01 seconds. The slowness was upsetting me because dataset.shuffle() was supposed to take ~27 hours for bookcorpus. Now with the fix it takes ~2.5 hours (which still is pretty slow, but I'll open a separate issue for that).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/405\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/405\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/404","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/404\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/404\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/404\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/404","id":658400987,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUwMzY4Mjg4","number":404,"title":"Add seed in metrics","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-16T17:27:05Z","updated_at":"2020-07-20T10:12:35Z","closed_at":"2020-07-20T10:12:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/404","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/404","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/404.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/404.patch","merged_at":"2020-07-20T10:12:34Z"},"body":"With #361 we noticed that some metrics were not deterministic.\r\nIn this PR I allow the user to specify numpy's seed when instantiating a metric with `load_metric`.\r\nThe seed is set only when `compute` is called, and reset afterwards.\r\n\r\nMoreover when calling `compute` with the same metric instance (i.e. same experiment_id), the metric will always return the same results given the same inputs. 
This is the case even if the seed was not specified by the user, as the previous seed is going to be reused.\r\n\r\nHowever, instantiating a metric twice (two different experiments) without specifying a seed can produce different results.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/404\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/404\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/403","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/403\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/403\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/403\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/403","id":658325756,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUwMzAzNjI2","number":403,"title":"return python objects instead of arrays by default","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-16T15:51:52Z","updated_at":"2020-07-17T11:37:01Z","closed_at":"2020-07-17T11:37:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/403","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/403","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/403.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/403.patch","merged_at":"2020-07-17T11:37:00Z"},"body":"We were using to_pandas() to convert from arrow types; however, it returns numpy arrays instead of python lists.\r\nI fixed it by using to_pydict\/to_pylist instead.\r\n\r\nFix #387 \r\nIt was mentioned in https:\/\/github.com\/huggingface\/transformers\/issues\/5729\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/403\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/403\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/402","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/402\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/402\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/402\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/402","id":658001288,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUwMDI2NTE0","number":402,"title":"Search qa","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-16T09:00:10Z","updated_at":"2020-07-16T14:27:00Z","closed_at":"2020-07-16T14:26:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/402","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/402","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/402.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/402.patch","merged_at":"2020-07-16T14:26:59Z"},"body":"add SearchQA dataset\r\n\r\n#336 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/402\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/402\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/401","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/401\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/401\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/401\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/401","id":657996252,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUwMDIyNTc0","number":401,"title":"add 
web_questions","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-07-16T08:54:59Z","updated_at":"2020-08-06T06:16:20Z","closed_at":"2020-08-06T06:16:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/401","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/401","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/401.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/401.patch","merged_at":"2020-08-06T06:16:19Z"},"body":"add Web Question dataset\r\n#336 \r\n\r\nMaybe @patrickvonplaten you can help with the dummy_data structure? 
it still broken","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/401\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/401\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/400","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/400\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/400\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/400\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/400","id":657975600,"node_id":"MDExOlB1bGxSZXF1ZXN0NDUwMDA1MDU5","number":400,"title":"Web questions","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-16T08:28:29Z","updated_at":"2020-07-16T08:50:51Z","closed_at":"2020-07-16T08:42:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/400","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/400","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/400.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/400.patch","merged_at":null},"body":"add the WebQuestion dataset\r\n#336 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/400\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/400\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/399","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/399\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/399\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/399\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/399","id":657841433,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ5ODkxNTEy","number":399,"title":"Spelling 
mistake","user":{"login":"BlancRay","id":9410067,"node_id":"MDQ6VXNlcjk0MTAwNjc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9410067?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BlancRay","html_url":"https:\/\/github.com\/BlancRay","followers_url":"https:\/\/api.github.com\/users\/BlancRay\/followers","following_url":"https:\/\/api.github.com\/users\/BlancRay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BlancRay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BlancRay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BlancRay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BlancRay\/orgs","repos_url":"https:\/\/api.github.com\/users\/BlancRay\/repos","events_url":"https:\/\/api.github.com\/users\/BlancRay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BlancRay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-16T04:37:58Z","updated_at":"2020-07-16T06:49:48Z","closed_at":"2020-07-16T06:49:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/399","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/399","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/399.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/399.patch","merged_at":"2020-07-16T06:49:37Z"},"body":"In \"Formatting the dataset\" part, \"The two toehr modifications...\" should be \"The two other modifications...\" ,the word \"other\" wrong spelled as \"toehr\".","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/399\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/399\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/398","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/398\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/398\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/398\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/398","id":657511962,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ5NjE1OTk1","number":398,"title":"Add inline 
links","user":{"login":"Bharat123rox","id":13381361,"node_id":"MDQ6VXNlcjEzMzgxMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13381361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Bharat123rox","html_url":"https:\/\/github.com\/Bharat123rox","followers_url":"https:\/\/api.github.com\/users\/Bharat123rox\/followers","following_url":"https:\/\/api.github.com\/users\/Bharat123rox\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Bharat123rox\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Bharat123rox\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Bharat123rox\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Bharat123rox\/orgs","repos_url":"https:\/\/api.github.com\/users\/Bharat123rox\/repos","events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-15T17:04:04Z","updated_at":"2020-07-22T10:14:22Z","closed_at":"2020-07-22T10:14:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/398","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/398","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/398.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/398.patch","merged_at":"2020-07-22T10:14:22Z"},"body":"Add inline links to `Contributing.md`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/398\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/398\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/397","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/397\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/397\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/397\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/397","id":657510856,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ5NjE1MDA4","number":397,"title":"Add contiguous 
sharding","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-15T17:02:58Z","updated_at":"2020-07-17T16:59:31Z","closed_at":"2020-07-17T16:59:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/397","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/397","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/397.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/397.patch","merged_at":"2020-07-17T16:59:30Z"},"body":"This makes dset.shard() play nice with nlp.concatenate_datasets(). When I originally wrote the shard() method, I was thinking about a distributed training scenario, but https:\/\/github.com\/huggingface\/nlp\/pull\/389 also uses it for splitting the dataset for distributed preprocessing.\r\n\r\nUsage:\r\n```\r\nnlp.concatenate_datasets([dset.shard(n, i, contiguous=True) for i in range(n)])\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/397\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/397\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/396","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/396\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/396\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/396\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/396","id":657477952,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ5NTg3MDQ4","number":396,"title":"Fix memory issue when doing 
select","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-15T16:15:04Z","updated_at":"2020-07-16T08:07:32Z","closed_at":"2020-07-16T08:07:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/396","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/396","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/396.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/396.patch","merged_at":"2020-07-16T08:07:30Z"},"body":"We were passing the `nlp.Dataset` object to get the hash for the new dataset's file name.\r\nFix #395 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/396\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/396\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/395","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/395\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/395\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/395\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/395","id":657454983,"node_id":"MDU6SXNzdWU2NTc0NTQ5ODM=","number":395,"title":"Memory issue when doing 
select","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2020-07-15T15:43:38Z","updated_at":"2020-07-16T08:07:31Z","closed_at":"2020-07-16T08:07:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As noticed in #389, the following code loads the entire wikipedia in memory.\r\n\r\n```python\r\nimport nlp\r\nw = nlp.load_dataset(\"wikipedia\", \"20200501.en\", split=\"train\")\r\nw.select([0])\r\n```\r\n\r\nThis is caused by [this line](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/src\/nlp\/arrow_dataset.py#L626) for some reason, that tries to 
serialize the function with all the wikipedia data with it.\r\n\r\nIt's not the case with `.map` or `.filter`.\r\nHowever functions that are based on `.select` like `.shuffle`, `.shard`, `.train_test_split`, `.sort` are affected.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/395\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/395\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/394","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/394\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/394\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/394\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/394","id":657425548,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ5NTQzNTE0","number":394,"title":"Remove remaining nested dict","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-15T15:05:52Z","updated_at":"2020-07-16T07:39:52Z","closed_at":"2020-07-16T07:39:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/394","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/394","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/394.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/394.patch","merged_at":"2020-07-16T07:39:51Z"},"body":"This PR deletes the remaining unnecessary nested dict \r\n#378 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/394\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/394\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/393","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/393\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/393\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/393\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/393","id":657330911,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ5NDY1MTAz","number":393,"title":"Fix extracted files directory for the DownloadManager","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-15T12:59:55Z","updated_at":"2020-07-17T17:02:16Z","closed_at":"2020-07-17T17:02:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/393","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/393","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/393.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/393.patch","merged_at":"2020-07-17T17:02:14Z"},"body":"The cache dir was often cluttered by extracted files because of the download manager.\r\n\r\nFor downloaded files, we are using the `downloads` directory to make things easier to navigate, but extracted files were still placed at the root of the cache directory. 
To fix that I changed the directory for extracted files to cache_dir\/downloads\/extracted.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/393\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/393\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/392","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/392\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/392\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/392\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/392","id":657313738,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ5NDUwOTkx","number":392,"title":"Style change detection","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-15T12:32:14Z","updated_at":"2020-07-21T13:18:36Z","closed_at":"2020-07-17T17:13:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/392","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/392","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/392.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/392.patch","merged_at":"2020-07-17T17:13:23Z"},"body":"Another [PAN task](https:\/\/pan.webis.de\/clef20\/pan20-web\/style-change-detection.html). This time about identifying when the style\/author changes in documents.\r\n\r\n- There's the possibility of adding the [PAN19](https:\/\/zenodo.org\/record\/3577602) and PAN18 style change detection tasks too (these are datasets whose labels are a subset of PAN20's). These would probably make more sense as separate datasets (like wmt is now)\r\n- I've converted the integer 0,1 values to a boolean\r\n- Using manually downloaded data again. 
This might be changed at some point following the discussion in https:\/\/github.com\/huggingface\/nlp\/pull\/349.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/392\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/392\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/391","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/391\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/391\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/391\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/391","id":656991432,"node_id":"MDU6SXNzdWU2NTY5OTE0MzI=","number":391,"title":"\ud83c\udf1f [Metric Request] WOOD score","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[{"id":2459308248,"node_id":"MDU6TGFiZWwyNDU5MzA4MjQ4","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20request","name":"metric request","color":"d4c5f9","default":false,"description":"Requesting to add a new metric"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-15T01:16:37Z","updated_at":"2020-10-27T15:43:28Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"WOOD score paper : https:\/\/arxiv.org\/pdf\/2007.06898.pdf\r\n\r\nAbstract :\r\n\r\n>Models that surpass human performance on several popular benchmarks display significant degradation in performance on exposure to Out of Distribution (OOD) data. Recent research has shown that models overfit to spurious biases and \u2018hack\u2019 datasets, in lieu of learning generalizable features like humans. 
In order to stop the inflation in model performance \u2013 and thus overestimation in AI systems\u2019 capabilities \u2013 we propose a simple and novel evaluation metric, WOOD Score, that encourages generalization during evaluation.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/391\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/391\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/390","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/390\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/390\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/390\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/390","id":656956384,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ5MTYxMzY3","number":390,"title":"Concatenate datasets","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-07-14T23:24:37Z","updated_at":"2020-07-22T09:49:58Z","closed_at":"2020-07-22T09:49:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/390","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/390","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/390.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/390.patch","merged_at":"2020-07-22T09:49:58Z"},"body":"I'm constructing the \"WikiBooks\" dataset, which is a concatenation of Wikipedia & BookCorpus. So I implemented the `Dataset.from_concat()` method, which concatenates two datasets with the same schema.\r\n\r\nThis would also be useful if someone wants to pretrain on a large generic dataset + their own custom dataset. 
Not in love with the method name, so would love to hear suggestions.\r\n\r\nUsage:\r\n```python\r\nfrom nlp import Dataset, load_dataset\r\n\r\ndata1, data2 = {\"id\": [0, 1, 2]}, {\"id\": [3, 4, 5]}\r\ndset1, dset2 = Dataset.from_dict(data1), Dataset.from_dict(data2)\r\ndset_concat = Dataset.from_concat([dset1, dset2])\r\nprint(dset_concat)\r\n# Dataset(schema: {'id': 'int64'}, num_rows: 6)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/390\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/390\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/389","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/389\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/389\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/389\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/389","id":656921768,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ5MTMyOTU5","number":389,"title":"Fix pickling of SplitDict","user":{"login":"mitchellgordon95","id":7490438,"node_id":"MDQ6VXNlcjc0OTA0Mzg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7490438?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mitchellgordon95","html_url":"https:\/\/github.com\/mitchellgordon95","followers_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/followers","following_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/orgs","repos_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/repos","events_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mitchellgordon95\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-07-14T21:53:39Z","updated_at":"2020-08-04T14:38:10Z","closed_at":"2020-08-04T14:38:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/389","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/389","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/389.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/389.patch","merged_at":null},"body":"It would be nice to pickle and unpickle Datasets, as done in [this tutorial](https:\/\/github.com\/patil-suraj\/exploring-T5\/blob\/master\/T5_on_TPU.ipynb). Example:\r\n\r\n```\r\nwiki = nlp.load_dataset('wikipedia', split='train')\r\ndef sentencize(examples):\r\n ...\r\n\r\nwiki = wiki.map(sentencize, batched=True)\r\ntorch.save(wiki, 'sentencized_wiki_dataset.pt')\r\n```\r\n\r\nHowever, upon unpickling the dataset via torch.load(...), this error is raised:\r\n\r\n```\r\nValueError(\"Cannot add elem. 
Use .add() instead.\")\r\n```\r\nOn line [492 of splits.py](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/src\/nlp\/splits.py#L492). This is because SplitDict subclasses dict, and pickle treats [dicts specially](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/src\/nlp\/splits.py#L492). Pickle expects access to `dict.__setitem__`, but this is disallowed by the class.\r\n\r\nThe workaround is to provide an explicit interface for pickle to call when pickling and unpickling, thereby avoiding the use of `__setitem__`.\r\n\r\nTesting:\r\n- Manually pickled and unpickled a modified wikipedia dataset.\r\n- Ran `make style`\r\n\r\nI would be happy to run any other tests, but I couldn't find any in the contributing guidelines.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/389\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/389\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/388","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/388\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/388\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/388\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/388","id":656707497,"node_id":"MDU6SXNzdWU2NTY3MDc0OTc=","number":388,"title":"\ud83d\udc1b [Dataset] Cannot download wmt14, wmt15 and wmt17","user":{"login":"SamuelCahyawijaya","id":2826602,"node_id":"MDQ6VXNlcjI4MjY2MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2826602?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya","html_url":"https:\/\/github.com\/SamuelCahyawijaya","followers_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/followers","following_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/orgs","repos_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/repos","events_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SamuelCahyawijaya\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"open","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2020-07-14T15:36:41Z","updated_at":"2020-08-05T14:56:32Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"1. I try downloading `wmt14`, `wmt15`, `wmt17`, `wmt19` with the following code:\r\n```\r\nnlp.load_dataset('wmt14','de-en')\r\nnlp.load_dataset('wmt15','de-en')\r\nnlp.load_dataset('wmt17','de-en')\r\nnlp.load_dataset('wmt19','de-en')\r\n```\r\nThe code runs but the download speed is **extremely slow**, the same behaviour is not observed on `wmt16` and `wmt18`\r\n\r\n2. 
When trying to download `wmt17 zh-en`, I got the following error:\r\n> ConnectionError: Couldn't reach https:\/\/storage.googleapis.com\/tfdataset-data\/downloadataset\/uncorpus\/UNv1.0.en-zh.tar.gz","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/388\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/388\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/387","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/387\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/387\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/387\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/387","id":656361357,"node_id":"MDU6SXNzdWU2NTYzNjEzNTc=","number":387,"title":"Conversion through to_pandas output numpy arrays for lists instead of python objects","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-14T06:24:01Z","updated_at":"2020-07-17T11:37:00Z","closed_at":"2020-07-17T11:37:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In a related question, the conversion through to_pandas output numpy arrays for the lists instead of python objects.\r\n\r\nHere is an example:\r\n```python\r\n>>> dataset._data.slice(key, 1).to_pandas().to_dict(\"list\")\r\n{'sentence1': ['Amrozi accused his brother , whom he called \" the witness \" , of deliberately distorting his evidence .'], 'sentence2': ['Referring to him as only \" the witness \" , Amrozi accused his brother of deliberately distorting his evidence .'], 'label': [1], 'idx': [0], 'input_ids': [array([ 101, 7277, 2180, 5303, 4806, 1117, 1711, 117, 2292,\r\n 1119, 1270, 107, 1103, 7737, 107, 117, 1104, 9938,\r\n 4267, 12223, 21811, 1117, 2554, 119, 102])], 'token_type_ids': [array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\r\n 0, 0, 0])], 'attention_mask': [array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\r\n 1, 1, 1])]}\r\n>>> type(dataset._data.slice(key, 1).to_pandas().to_dict(\"list\")['input_ids'][0])\r\n\r\n>>> dataset._data.slice(key, 1).to_pydict()\r\n{'sentence1': ['Amrozi accused his 
brother , whom he called \" the witness \" , of deliberately distorting his evidence .'], 'sentence2': ['Referring to him as only \" the witness \" , Amrozi accused his brother of deliberately distorting his evidence .'], 'label': [1], 'idx': [0], 'input_ids': [[101, 7277, 2180, 5303, 4806, 1117, 1711, 117, 2292, 1119, 1270, 107, 1103, 7737, 107, 117, 1104, 9938, 4267, 12223, 21811, 1117, 2554, 119, 102]], 'token_type_ids': [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], 'attention_mask': [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/387\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/387\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/386","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/386\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/386\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/386\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/386","id":655839067,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ4MjQ1NDI4","number":386,"title":"Update dataset loading and features - Add TREC dataset","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-13T13:10:18Z","updated_at":"2020-07-16T08:17:58Z","closed_at":"2020-07-16T08:17:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/386","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/386","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/386.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/386.patch","merged_at":"2020-07-16T08:17:58Z"},"body":"This PR:\r\n- add a template for a new dataset script\r\n- update the caching structure so that the path to the cached data files is also a function of the dataset loading script hash. This way when you update a loading script the data will be automatically updated instead of falling back to the previous version (which is usually a outdated). 
This makes it in particular easier to iterate when writing a new dataset loading script.\r\n- fix a bug in the `ClassLabel` feature and make it more flexible so that its methods `str2int` and `int2str` can also accept list, numpy arrays and PyTorch\/TensorFlow tensors.\r\n- add the TREC-6 dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/386\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/386\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/385","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/385\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/385\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/385\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/385","id":655663997,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ4MTAzMjY5","number":385,"title":"Remove unnecessary nested dict","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-07-13T08:46:23Z","updated_at":"2020-07-15T11:27:38Z","closed_at":"2020-07-15T10:03:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/385","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/385","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/385.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/385.patch","merged_at":"2020-07-15T10:03:53Z"},"body":"This PR is removing unnecessary nested dictionary used in some datasets. 
For now the following datasets are updated:\r\n\r\n- MLQA\r\n\r\n- RACE\r\n\r\nWill be adding more if necessary.\r\n\r\n#378 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/385\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/385\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/383","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/383\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/383\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/383\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/383","id":655291201,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ3ODI0OTky","number":383,"title":"Adding the Linguistic Code-switching Evaluation (LinCE) benchmark","user":{"login":"gaguilar","id":5833357,"node_id":"MDQ6VXNlcjU4MzMzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5833357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gaguilar","html_url":"https:\/\/github.com\/gaguilar","followers_url":"https:\/\/api.github.com\/users\/gaguilar\/followers","following_url":"https:\/\/api.github.com\/users\/gaguilar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gaguilar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gaguilar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gaguilar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gaguilar\/orgs","repos_url":"https:\/\/api.github.com\/users\/gaguilar\/repos","events_url":"https:\/\/api.github.com\/users\/gaguilar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gaguilar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-07-11T22:35:20Z","updated_at":"2020-07-16T16:19:46Z","closed_at":"2020-07-16T16:19:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/383","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/383","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/383.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/383.patch","merged_at":"2020-07-16T16:19:46Z"},"body":"Hi,\r\n\r\nFirst of all, this library is really cool! Thanks for putting all of this together! \r\n\r\nThis PR contains the [Linguistic Code-switching Evaluation (LinCE) benchmark](https:\/\/ritual.uh.edu\/lince). As described in the official website (FAQ):\r\n\r\n> 1. Why do we need LinCE?\r\n>LinCE brings 10 code-switching datasets together for 4 tasks and 4 language pairs with 5 leaderboards in a single evaluation platform. We examined each dataset and fixed major issues on the partitions (or even define official partitions) with a comprehensive stratification method (see our paper for more details).\r\n>Besides, we believe that online benchmarks like LinCE bring steady research progress and allow to compare state-of-the-art models at the pace of the progress in NLP. 
We expect to benefit greatly the code-switching community with this benchmark.\r\n\r\n\r\nThe data comes from social media and here's the summary table of tasks per language pair:\r\n\r\n| Language Pairs | LID | POS | NER | SA |\r\n|----------------------------------------|-----|-----|-----|----|\r\n| Spanish-English | \u2705 | \u2705 | \u2705 | \u2705 |\r\n| Hindi-English | \u2705 | \u2705 | \u2705 | |\r\n| Modern Standard Arabic-Egyptian Arabic | \u2705 | | \u2705 | |\r\n| Nepali-English | \u2705 | | | |\r\n\r\nThe tasks are as follows:\r\n* LID: token-level language identification\r\n* POS: part-of-speech tagging\r\n* NER: named entity recognition\r\n* SA: sentiment analysis\r\n\r\nWith the exception of MSA-EA, the rest of the datasets contain token-level LID labels.\r\n\r\n## Usage\r\n\r\nFor Spanish-English LID, we can load the data as follows:\r\n```\r\nimport nlp\r\n\r\ndata = nlp.load_dataset('.\/datasets\/lince\/lince.py', 'lid_spaeng')\r\n\r\nfor split in data:\r\n print(data[split])\r\n```\r\n\r\nHere's the output:\r\n```\r\nDataset(schema: {'idx': 'int32', 'tokens': 'list', 'lid': 'list'}, num_rows: 21030)\r\nDataset(schema: {'idx': 'int32', 'tokens': 'list', 'lid': 'list'}, num_rows: 3332)\r\nDataset(schema: {'idx': 'int32', 'tokens': 'list', 'lid': 'list'}, num_rows: 8289)\r\n```\r\n\r\nHere's the list of shortcut names for every dataset available in LinCE:\r\n* `lid_spaeng`\r\n* `lid_hineng`\r\n* `lid_nepeng`\r\n* `lid_msaea`\r\n* `pos_spaeng`\r\n* `pos_hineng`\r\n* `ner_spaeng`\r\n* `ner_hineng`\r\n* `ner_msaea`\r\n* `sa_spaeng`\r\n\r\n\r\nAll the numbers match with Table 3 in the LinCE [paper](https:\/\/www.aclweb.org\/anthology\/2020.lrec-1.223.pdf). Also, note that the MSA-EA datasets use the Persian script while the other datasets use the Roman script.\r\n\r\n\r\n## Features\r\n\r\nHere is how the features look in the case of language identification (LID) tasks:\r\n\r\n| LID Feature | Type | Description |\r\n|----------------------|---------------|-------------------------------------------|\r\n| `idx` | `int` | Dataset index of current sentence |\r\n| `tokens` | `list` | List of tokens (string) of a sentence |\r\n| `lid` | `list` | List of LID labels (string) of a sentence |\r\n\r\nFor part-of-speech (POS) tagging:\r\n\r\n| POS Feature | Type | Description |\r\n|----------------------|---------------|-------------------------------------------|\r\n| `idx` | `int` | Dataset index of current sentence |\r\n| `tokens` | `list` | List of tokens (string) of a sentence |\r\n| `lid` | `list` | List of LID labels (string) of a sentence |\r\n| `pos` | `list` | List of POS tags (string) of a sentence |\r\n\r\nFor named entity recognition (NER):\r\n\r\n| NER Feature | Type | Description |\r\n|----------------------|---------------|-------------------------------------------|\r\n| `idx` | `int` | Dataset index of current sentence |\r\n| `tokens` | `list` | List of tokens (string) of a sentence |\r\n| `lid` | `list` | List of LID labels (string) of a sentence |\r\n| `ner` | `list` | List of NER labels (string) of a sentence |\r\n\r\n**NOTE**: the MSA-EA NER dataset does not contain the `lid` feature.\r\n\r\nFor sentiment analysis (SA):\r\n\r\n| SA Feature | Type | Description |\r\n|---------------------|-------------|-------------------------------------------|\r\n| `idx` | `int` | Dataset index of current sentence |\r\n| `tokens` | `list` | List of tokens (string) of a sentence |\r\n| `lid` | `list` | List of LID labels (string) of a sentence |\r\n| `sa` | `str` | Sentiment label 
(string) of a sentence |\r\n\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/383\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/383\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/382","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/382\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/382\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/382\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/382","id":655290482,"node_id":"MDU6SXNzdWU2NTUyOTA0ODI=","number":382,"title":"1080","user":{"login":"saq194","id":60942503,"node_id":"MDQ6VXNlcjYwOTQyNTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/60942503?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/saq194","html_url":"https:\/\/github.com\/saq194","followers_url":"https:\/\/api.github.com\/users\/saq194\/followers","following_url":"https:\/\/api.github.com\/users\/saq194\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/saq194\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/saq194\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/saq194\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/saq194\/orgs","repos_url":"https:\/\/api.github.com\/users\/saq194\/repos","events_url":"https:\/\/api.github.com\/users\/saq194\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/saq194\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-11T22:29:07Z","updated_at":"2020-07-11T22:49:38Z","closed_at":"2020-07-11T22:49:38Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/382\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/382\/timeline","performed_via_github_app":null} 
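A minimal usage sketch to complement the loading example in the LinCE pull request above, assuming the `lid_spaeng` configuration and the `tokens`/`lid` features described there; the script path and the choice of three examples are illustrative only:

```python
import nlp

# Load the Spanish-English language-identification configuration of LinCE
# (path and config name as given in the pull request description above).
data = nlp.load_dataset('./datasets/lince/lince.py', 'lid_spaeng')
train = data['train']

# Print the first few sentences as (token, LID label) pairs.
for i in range(3):
    example = train[i]
    for token, label in zip(example['tokens'], example['lid']):
        print(f"{token}\t{label}")
    print()
```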
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/381","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/381\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/381\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/381\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/381","id":655277119,"node_id":"MDU6SXNzdWU2NTUyNzcxMTk=","number":381,"title":"NLp","user":{"login":"Spartanthor","id":68147610,"node_id":"MDQ6VXNlcjY4MTQ3NjEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68147610?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Spartanthor","html_url":"https:\/\/github.com\/Spartanthor","followers_url":"https:\/\/api.github.com\/users\/Spartanthor\/followers","following_url":"https:\/\/api.github.com\/users\/Spartanthor\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Spartanthor\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Spartanthor\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Spartanthor\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Spartanthor\/orgs","repos_url":"https:\/\/api.github.com\/users\/Spartanthor\/repos","events_url":"https:\/\/api.github.com\/users\/Spartanthor\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Spartanthor\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-11T20:50:14Z","updated_at":"2020-07-11T20:50:39Z","closed_at":"2020-07-11T20:50:39Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/381\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/381\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/378","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/378\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/378\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/378\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/378","id":655226316,"node_id":"MDU6SXNzdWU2NTUyMjYzMTY=","number":378,"title":"[dataset] Structure of MLQA seems unecessary 
nested","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-11T15:16:08Z","updated_at":"2020-07-15T16:17:20Z","closed_at":"2020-07-15T16:17:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The features of the MLQA dataset comprise several nested dictionaries with a single element inside (for `questions` and `ids`): https:\/\/github.com\/huggingface\/nlp\/blob\/master\/datasets\/mlqa\/mlqa.py#L90-L97\r\n\r\nShould we keep this @mariamabarham @patrickvonplaten? Was this added for compatibility with tfds?\r\n\r\n```python\r\n features=nlp.Features(\r\n {\r\n \"context\": nlp.Value(\"string\"),\r\n \"questions\": nlp.features.Sequence({\"question\": nlp.Value(\"string\")}),\r\n \"answers\": nlp.features.Sequence(\r\n {\"text\": nlp.Value(\"string\"), \"answer_start\": nlp.Value(\"int32\"),}\r\n ),\r\n \"ids\": nlp.features.Sequence({\"idx\": nlp.Value(\"string\")})\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/378\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/378\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/377","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/377\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/377\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/377\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/377","id":655215790,"node_id":"MDU6SXNzdWU2NTUyMTU3OTA=","number":377,"title":"Iyy!!!","user":{"login":"ajinomoh","id":68154535,"node_id":"MDQ6VXNlcjY4MTU0NTM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/68154535?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ajinomoh","html_url":"https:\/\/github.com\/ajinomoh","followers_url":"https:\/\/api.github.com\/users\/ajinomoh\/followers","following_url":"https:\/\/api.github.com\/users\/ajinomoh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ajinomoh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ajinomoh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ajinomoh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ajinomoh\/orgs","repos_url":"https:\/\/api.github.com\/users\/ajinomoh\/repos","events_url":"https:\/\/api.github.com\/users\/ajinomoh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ajinomoh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-11T14:11:07Z","updated_at":"2020-07-11T14:30:51Z","closed_at":"2020-07-11T14:30:51Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/377\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/377\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/376","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/376\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/376\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/376\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/376","id":655047826,"node_id":"MDU6SXNzdWU2NTUwNDc4MjY=","number":376,"title":"to_pandas conversion doesn't always 
work","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-10T21:33:31Z","updated_at":"2020-07-20T10:10:45Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"For some complex nested types, the conversion from Arrow to python dict through pandas doesn't seem to be possible.\r\n\r\nHere is an example using the official SQUAD v2 JSON file.\r\n\r\nThis example was found while investigating #373.\r\n\r\n```python\r\n>>> squad = load_dataset('json', data_files={nlp.Split.TRAIN: [\".\/train-v2.0.json\"]}, download_mode=nlp.GenerateMode.FORCE_REDOWNLOAD, version=\"1.0.0\", field='data')\r\n>>> squad['train']\r\nDataset(schema: {'title': 'string', 'paragraphs': 'list>, is_impossible: bool, plausible_answers: list>>>, context: string>>'}, num_rows: 442)\r\n>>> squad['train'][0]\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/Users\/thomwolf\/Documents\/GitHub\/datasets\/src\/nlp\/arrow_dataset.py\", line 589, in __getitem__\r\n format_kwargs=self._format_kwargs,\r\n File \"\/Users\/thomwolf\/Documents\/GitHub\/datasets\/src\/nlp\/arrow_dataset.py\", line 529, in _getitem\r\n outputs = self._unnest(self._data.slice(key, 1).to_pandas().to_dict(\"list\"))\r\n File \"pyarrow\/array.pxi\", line 559, in pyarrow.lib._PandasConvertible.to_pandas\r\n File \"pyarrow\/table.pxi\", line 1367, in pyarrow.lib.Table._to_pandas\r\n File \"\/Users\/thomwolf\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/pandas_compat.py\", line 766, in table_to_blockmanager\r\n blocks = _table_to_blocks(options, table, categories, ext_columns_dtypes)\r\n File \"\/Users\/thomwolf\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/pandas_compat.py\", line 1101, in _table_to_blocks\r\n list(extension_columns.keys()))\r\n File \"pyarrow\/table.pxi\", line 881, in pyarrow.lib.table_to_blocks\r\n File \"pyarrow\/error.pxi\", line 105, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowNotImplementedError: Not implemented type for Arrow list to pandas: struct>, is_impossible: bool, plausible_answers: list>>>, context: string>\r\n```\r\n\r\ncc @lhoestq would we have a way to detect this from the schema maybe?\r\n\r\nHere is the schema for this pretty complex JSON:\r\n```python\r\n>>> squad['train'].schema\r\ntitle: string\r\nparagraphs: list>, is_impossible: bool, plausible_answers: list>>>, context: string>>\r\n child 0, item: struct>, is_impossible: bool, plausible_answers: list>>>, 
context: string>\r\n child 0, qas: list>, is_impossible: bool, plausible_answers: list>>>\r\n child 0, item: struct>, is_impossible: bool, plausible_answers: list>>\r\n child 0, question: string\r\n child 1, id: string\r\n child 2, answers: list>\r\n child 0, item: struct\r\n child 0, text: string\r\n child 1, answer_start: int64\r\n child 3, is_impossible: bool\r\n child 4, plausible_answers: list>\r\n child 0, item: struct\r\n child 0, text: string\r\n child 1, answer_start: int64\r\n child 1, context: string\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/376\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/376\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/375","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/375\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/375\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/375\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/375","id":655023307,"node_id":"MDU6SXNzdWU2NTUwMjMzMDc=","number":375,"title":"TypeError when computing bertscore","user":{"login":"willywsm1013","id":13269577,"node_id":"MDQ6VXNlcjEzMjY5NTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13269577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/willywsm1013","html_url":"https:\/\/github.com\/willywsm1013","followers_url":"https:\/\/api.github.com\/users\/willywsm1013\/followers","following_url":"https:\/\/api.github.com\/users\/willywsm1013\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/willywsm1013\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/willywsm1013\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/willywsm1013\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/willywsm1013\/orgs","repos_url":"https:\/\/api.github.com\/users\/willywsm1013\/repos","events_url":"https:\/\/api.github.com\/users\/willywsm1013\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/willywsm1013\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-10T20:37:44Z","updated_at":"2020-09-07T14:50:12Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, \r\n\r\nI installed nlp 0.3.0 via pip, and my python version is 3.7.\r\nWhen I tried to compute bertscore with the code:\r\n```\r\nimport nlp \r\nbertscore = nlp.load_metric('bertscore') \r\n# load hyps and refs \r\n...\r\nprint (bertscore.compute(hyps, refs, lang='en'))\r\n```\r\n\r\nI got the following error.\r\n```\r\nTraceback (most recent call last):\r\n File \"bert_score_evaluate.py\", line 16, in \r\n print (bertscore.compute(hyps, refs, lang='en'))\r\n File \"\/home\/willywsm\/anaconda3\/envs\/torcher\/lib\/python3.7\/site-packages\/nlp\/metric.py\", line 200, in compute\r\n output = self._compute(predictions=predictions, references=references, **metrics_kwargs)\r\n File 
\"\/home\/willywsm\/anaconda3\/envs\/torcher\/lib\/python3.7\/site-packages\/nlp\/metrics\/bertscore\/fb176889831bf0ce995ed197edc94b2e9a83f647a869bb8c9477dbb2d04d0f08\/bertscore.py\", line 105, in _compute\r\n hashcode = bert_score.utils.get_hash(model_type, num_layers, idf, rescale_with_baseline)\r\nTypeError: get_hash() takes 3 positional arguments but 4 were given\r\n```\r\n\r\nIt seems like there is something wrong with get_hash() function?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/375\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/375\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/374","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/374\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/374\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/374\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/374","id":654895066,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ3NTMxMzUy","number":374,"title":"Add dataset post processing for faiss indexes","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-10T16:25:59Z","updated_at":"2020-07-13T13:44:03Z","closed_at":"2020-07-13T13:44:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/374","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/374","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/374.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/374.patch","merged_at":"2020-07-13T13:44:01Z"},"body":"# Post processing of datasets for faiss indexes\r\n\r\nNow that we can have datasets with embeddings (see `wiki_pr` for example), we can allow users to load the dataset + get the Faiss index that comes with it to do nearest neighbors queries.\r\n\r\n## Implementation proposition\r\n\r\n- Faiss indexes have to be added to the `nlp.Dataset` object, and therefore it's in a different scope that what are doing the `_split_generators` and `_generate_examples` methods of `nlp.DatasetBuilder`. 
Therefore I added a new method for post processing of the `nlp.Dataset` object called `_post_process` (name could change)\r\n- The role of `_post_process` is to apply dataset transforms (filter\/map etc.) or indexing functions (add_faiss_index) to modify\/enrich the `nlp.Dataset` object. It is not part of the `download_and_prepare` process (that is focused on arrow files creation) so the post processing is run inside the `as_dataset` method.\r\n- `_post_process` can generate new files (cached files from dataset transforms or serialized faiss indexes) and their names are defined by `_post_processing_resources`\r\n- as we know what are the post processing resources, we can download them automatically from google storage instead of computing them if they're available (as we do for arrow files)\r\n\r\nI'd happy to discuss these choices !\r\n\r\n## The `wiki_dpr` index\r\n\r\nIt takes 1h20 and ~7GB of memory to compute. The final index is 1.42GB and takes ~1.5GB of memory.\r\nThis is pretty cool given that a naive flat index would take 170GB of memory to store the 21M vectors of dim 768.\r\nI couldn't use directly the Faiss `index_factory` as I needed to set the metric to inner product.\r\n\r\n## Example of usage\r\n\r\n```python\r\nimport nlp\r\ndset = nlp.load_dataset(\r\n \"wiki_dpr\",\r\n \"psgs_w100_with_nq_embeddings\",\r\n split=\"train\",\r\n with_index=True\r\n)\r\nprint(len(dset), dset.list_indexes()) # (21015300, ['embeddings'])\r\n```\r\n\r\n(it also works with the dataset configuration without the embeddings because I added the index file in google storage for this one too)\r\n\r\n## Demo\r\n\r\nYou can also check a demo on google colab that shows how to use it with the DPRQuestionEncoder from transformers:\r\nhttps:\/\/colab.research.google.com\/drive\/1FakNU8W5EPMcWff7iP1H6REg3XSS0YLp?usp=sharing\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/374\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/374\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/373","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/373\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/373\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/373\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/373","id":654845133,"node_id":"MDU6SXNzdWU2NTQ4NDUxMzM=","number":373,"title":"Segmentation fault when loading local JSON dataset as of 
#372","user":{"login":"vegarab","id":24683907,"node_id":"MDQ6VXNlcjI0NjgzOTA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24683907?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vegarab","html_url":"https:\/\/github.com\/vegarab","followers_url":"https:\/\/api.github.com\/users\/vegarab\/followers","following_url":"https:\/\/api.github.com\/users\/vegarab\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vegarab\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vegarab\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vegarab\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vegarab\/orgs","repos_url":"https:\/\/api.github.com\/users\/vegarab\/repos","events_url":"https:\/\/api.github.com\/users\/vegarab\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vegarab\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-07-10T15:04:25Z","updated_at":"2020-12-15T07:27:20Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The last issue was closed (#369) once the #372 update was merged. However, I'm still not able to load a SQuAD formatted JSON file. Instead of the previously recorded pyarrow error, I now get a segmentation fault. \r\n\r\n```\r\ndataset = nlp.load_dataset('json', data_files={nlp.Split.TRAIN: [\".\/datasets\/train-v2.0.json\"]}, field='data')\r\n```\r\ncauses\r\n```\r\nUsing custom data configuration default\r\nDownloading and preparing dataset json\/default (download: Unknown size, generated: Unknown size, total: Unknown size) to \/home\/XXX\/.cache\/huggingface\/datasets\/json\/default\/0.0.0...\r\n0 tables [00:00, ? 
tables\/s]Segmentation fault (core dumped)\r\n```\r\nwhere `.\/datasets\/train-v2.0.json` is downloaded directly from https:\/\/rajpurkar.github.io\/SQuAD-explorer\/.\r\nThis is consistent with other SQuAD-formatted JSON files.\r\n\r\nWhen attempting to load the dataset again, I get the following:\r\n```\r\nUsing custom data configuration default\r\nTraceback (most recent call last):\r\n File \"dataloader.py\", line 6, in \r\n 'json', data_files={nlp.Split.TRAIN: [\".\/datasets\/train-v2.0.json\"]}, field='data')\r\n File \"\/home\/XXX\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 524, in load_dataset\r\n save_infos=save_infos,\r\n File \"\/home\/XXX\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 382, in download_and_prepare\r\n with incomplete_dir(self._cache_dir) as tmp_data_dir:\r\n File \"\/home\/XXX\/.conda\/envs\/torch\/lib\/python3.7\/contextlib.py\", line 112, in __enter__\r\n return next(self.gen)\r\n File \"\/home\/XXX\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 368, in incomplete_dir\r\n os.makedirs(tmp_dir)\r\n File \"\/home\/XXX\/.conda\/envs\/torch\/lib\/python3.7\/os.py\", line 223, in makedirs\r\n mkdir(name, mode)\r\nFileExistsError: [Errno 17] File exists: '\/home\/XXX\/.cache\/huggingface\/datasets\/json\/default\/0.0.0.incomplete'\r\n```\r\n\r\n(Not sure if you wanted this in the previous issue #369 or not as it was closed.)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/373\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/373\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/372","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/372\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/372\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/372\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/372","id":654774420,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ3NDMzNTA4","number":372,"title":"Make the json script more 
flexible","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-10T13:15:15Z","updated_at":"2020-07-10T14:52:07Z","closed_at":"2020-07-10T14:52:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/372","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/372","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/372.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/372.patch","merged_at":"2020-07-10T14:52:05Z"},"body":"Fix https:\/\/github.com\/huggingface\/nlp\/issues\/359\r\nFix https:\/\/github.com\/huggingface\/nlp\/issues\/369\r\n\r\nJSON script now can accept JSON files containing a single dict with the records as a list in one attribute to the dict (previously it only accepted JSON files containing records as rows of dicts in the file).\r\n\r\nIn this case, you should indicate using `field=XXX` the name of the field in the JSON structure which contains the records you want to load. The records can be a dict of lists or a list of dicts.\r\n\r\nE.g. 
to load the SQuAD dataset JSON (without using the `squad` specific dataset loading script), in which the data rows are in the `data` field of the JSON dict, you can do:\r\n```python\r\nfrom nlp import load_dataset\r\ndataset = load_dataset('json', data_files='\/PATH\/TO\/JSON', field='data')\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/372\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/372\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/371","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/371\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/371\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/371\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/371","id":654668242,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ3MzQ4NDgw","number":371,"title":"Fix cached file path for metrics with different config names","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-10T10:02:24Z","updated_at":"2020-07-10T13:45:22Z","closed_at":"2020-07-10T13:45:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/371","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/371","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/371.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/371.patch","merged_at":"2020-07-10T13:45:20Z"},"body":"The config name was not taken into account to build the cached file path.\r\nIt should fix #368 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/371\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/371\/timeline","performed_via_github_app":null} 
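A short sketch of the situation that pull request #371 above addresses, assuming two GLUE metric configurations are loaded in the same run; with the config name included in the cached file path, each configuration gets its own file instead of competing for one lock. The config names and toy predictions are illustrative:

```python
import nlp

# Two different configurations of the same metric; once the config name is part
# of the cached file path, their caches no longer collide.
mrpc_metric = nlp.load_metric('glue', 'mrpc')
sst2_metric = nlp.load_metric('glue', 'sst2')

print(mrpc_metric.compute(predictions=[0, 1], references=[0, 1]))
print(sst2_metric.compute(predictions=[1, 0], references=[1, 1]))
```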
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/370","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/370\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/370\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/370\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/370","id":654304193,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ3MDU3NTIw","number":370,"title":"Allow indexing Dataset via np.ndarray","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-09T19:43:15Z","updated_at":"2020-07-10T14:05:44Z","closed_at":"2020-07-10T14:05:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/370","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/370","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/370.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/370.patch","merged_at":"2020-07-10T14:05:43Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/370\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/370\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/369","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/369\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/369\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/369\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/369","id":654186890,"node_id":"MDU6SXNzdWU2NTQxODY4OTA=","number":369,"title":"can't load local dataset: pyarrow.lib.ArrowInvalid: straddling object straddles two block 
boundaries","user":{"login":"vegarab","id":24683907,"node_id":"MDQ6VXNlcjI0NjgzOTA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24683907?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vegarab","html_url":"https:\/\/github.com\/vegarab","followers_url":"https:\/\/api.github.com\/users\/vegarab\/followers","following_url":"https:\/\/api.github.com\/users\/vegarab\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vegarab\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vegarab\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vegarab\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vegarab\/orgs","repos_url":"https:\/\/api.github.com\/users\/vegarab\/repos","events_url":"https:\/\/api.github.com\/users\/vegarab\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vegarab\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-09T16:16:53Z","updated_at":"2020-12-15T23:07:22Z","closed_at":"2020-07-10T14:52:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Trying to load a local SQuAD-formatted dataset (from a JSON file, about 60MB):\r\n```\r\ndataset = nlp.load_dataset(path='json', data_files={nlp.Split.TRAIN: [\".\/path\/to\/file.json\"]})\r\n```\r\ncauses\r\n```\r\nTraceback (most recent call last):\r\n File \"dataloader.py\", line 9, in \r\n [\".\/path\/to\/file.json\"]})\r\n File \"\/home\/XXX\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 524, in load_dataset\r\n save_infos=save_infos,\r\n File \"\/home\/XXX\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 432, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/XXX\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 483, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/XXX\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 719, in _prepare_split\r\n for key, table in utils.tqdm(generator, unit=\" tables\", leave=False):\r\n File \"\/home\/XXX\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/tqdm\/std.py\", line 1129, in __iter__\r\n for obj in iterable:\r\n File \"\/home\/XXX\/.conda\/envs\/torch\/lib\/python3.7\/site-packages\/nlp\/datasets\/json\/88c1bc5c68489f7eda549ed05a5a738527c613b3e7a4ee3524d9d233353a949b\/json.py\", line 53, in _generate_tables\r\n file, read_options=self.config.pa_read_options, parse_options=self.config.pa_parse_options,\r\n File \"pyarrow\/_json.pyx\", line 191, in pyarrow._json.read_json\r\n File \"pyarrow\/error.pxi\", line 85, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: straddling object straddles two block boundaries (try to increase block size?)\r\n```\r\n\r\nI haven't been able to find any reports of this specific pyarrow error here or elsewhere. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/369\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/369\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/368","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/368\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/368\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/368\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/368","id":654087251,"node_id":"MDU6SXNzdWU2NTQwODcyNTE=","number":368,"title":"load_metric can't acquire lock anymore","user":{"login":"ydshieh","id":2521628,"node_id":"MDQ6VXNlcjI1MjE2Mjg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2521628?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ydshieh","html_url":"https:\/\/github.com\/ydshieh","followers_url":"https:\/\/api.github.com\/users\/ydshieh\/followers","following_url":"https:\/\/api.github.com\/users\/ydshieh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ydshieh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ydshieh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ydshieh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ydshieh\/orgs","repos_url":"https:\/\/api.github.com\/users\/ydshieh\/repos","events_url":"https:\/\/api.github.com\/users\/ydshieh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ydshieh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-09T14:04:09Z","updated_at":"2020-07-10T13:45:20Z","closed_at":"2020-07-10T13:45:20Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I can't load metric (glue) anymore after an error in a previous run. I even removed the whole cache folder `\/home\/XXX\/.cache\/huggingface\/`, and the issue persisted. 
What are the steps to fix this?\r\n\r\n Traceback (most recent call last):\r\n File \"\/home\/XXX\/miniconda3\/envs\/ML-DL-py-3.7\/lib\/python3.7\/site-packages\/nlp\/metric.py\", line 101, in __init__\r\n self.filelock.acquire(timeout=1)\r\n File \"\/home\/XXX\/miniconda3\/envs\/ML-DL-py-3.7\/lib\/python3.7\/site-packages\/filelock.py\", line 278, in acquire\r\n raise Timeout(self._lock_file)\r\n filelock.Timeout: The file lock '\/home\/XXX\/.cache\/huggingface\/metrics\/glue\/1.0.0\/1-glue-0.arrow.lock' could not be acquired.\r\n\r\n During handling of the above exception, another exception occurred:\r\n\r\n Traceback (most recent call last):\r\n File \"examples_huggingface_nlp.py\", line 268, in \r\n main()\r\n File \"examples_huggingface_nlp.py\", line 242, in main\r\n dataset, metric = get_dataset_metric(glue_task)\r\n File \"examples_huggingface_nlp.py\", line 77, in get_dataset_metric\r\n metric = nlp.load_metric('glue', glue_config, experiment_id=1)\r\n File \"\/home\/XXX\/miniconda3\/envs\/ML-DL-py-3.7\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 440, in load_metric\r\n **metric_init_kwargs,\r\n File \"\/home\/XXX\/miniconda3\/envs\/ML-DL-py-3.7\/lib\/python3.7\/site-packages\/nlp\/metric.py\", line 104, in __init__\r\n \"Cannot acquire lock, caching file might be used by another process, \"\r\n ValueError: Cannot acquire lock, caching file might be used by another process, you should setup a unique 'experiment_id' for this run.\r\n I0709 15:54:41.008838 139854118430464 filelock.py:318] Lock 139852058030936 released on \/home\/XXX\/.cache\/huggingface\/metrics\/glue\/1.0.0\/1-glue-0.arrow.lock\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/368\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/368\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/367","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/367\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/367\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/367\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/367","id":654012984,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ2ODIxNTAz","number":367,"title":"Update Xtreme to add PAWS-X 
es","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-09T12:14:37Z","updated_at":"2020-07-09T12:37:11Z","closed_at":"2020-07-09T12:37:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/367","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/367","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/367.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/367.patch","merged_at":"2020-07-09T12:37:10Z"},"body":"This PR adds the `PAWS-X.es` in the Xtreme dataset #362 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/367\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/367\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/366","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/366\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/366\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/366\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/366","id":653954896,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ2NzcyODE2","number":366,"title":"Add quora 
dataset","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-09T10:34:22Z","updated_at":"2020-07-13T17:35:21Z","closed_at":"2020-07-13T17:35:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/366","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/366","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/366.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/366.patch","merged_at":"2020-07-13T17:35:21Z"},"body":"Added the [Quora question pairs dataset](https:\/\/www.quora.com\/q\/quoradata\/First-Quora-Dataset-Release-Question-Pairs).\r\n\r\nImplementation Notes:\r\n- I used the original version provided on the quora website. 
There's also a [Kaggle competition](https:\/\/www.kaggle.com\/c\/quora-question-pairs) which has a nice train\/test split but I can't find an easy way to download it.\r\n- I've made the questions into a list:\r\n ```python\r\n {\r\n \"questions\": [\r\n {\"id\":0, \"text\": \"Is this an example question?\"},\r\n {\"id\":1, \"text\": \"Is this a sample question?\"},\r\n ],\r\n ...\r\n }\r\n ```\r\n rather than:\r\n ```python\r\n {\r\n \"question1\": \"Is this an example question?\",\r\n \"question2\": \"Is this a sample question?\"\r\n \"qid0\": 0\r\n \"qid1\": 1\r\n ...\r\n }\r\n ```\r\n Not sure if this was the right call.\r\n- Can't find a good citation for this dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/366\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/366\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/365","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/365\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/365\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/365\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/365","id":653845964,"node_id":"MDU6SXNzdWU2NTM4NDU5NjQ=","number":365,"title":"How to augment data ?","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-07-09T07:52:37Z","updated_at":"2020-07-10T09:12:07Z","closed_at":"2020-07-10T08:22:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Is there any clean way to augment data ?\r\n\r\nFor now my work-around is to use batched map, like this :\r\n\r\n```python\r\ndef aug(samples):\r\n # Simply copy the existing data to have x2 amount of data\r\n for k, v in samples.items():\r\n samples[k].extend(v)\r\n return samples\r\n\r\ndataset = dataset.map(aug, batched=True)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/365\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/365\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/364","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/364\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/364\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/364\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/364","id":653821597,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ2NjY0NzM5","number":364,"title":"add MS MARCO dataset","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-07-09T07:11:19Z","updated_at":"2020-08-06T06:15:49Z","closed_at":"2020-08-06T06:15:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/364","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/364","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/364.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/364.patch","merged_at":"2020-08-06T06:15:48Z"},"body":"This PR adds the MS MARCO dataset as requested in this issue #336. MS mARCO has multiple task including:\r\n\r\n- Passage and Document Retrieval\r\n\r\n- Keyphrase Extraction\r\n\r\n- QA and NLG\r\n\r\nThis PR only adds the 2 versions of the QA and NLG task dataset which was realeased with the original paper here https:\/\/arxiv.org\/pdf\/1611.09268.pdf \r\n\r\nTests are failing because of the dummy data. I tried to fix it without success. Can you please have a look at it? 
@patrickvonplaten , @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/364\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/364\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/363","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/363\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/363\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/363\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/363","id":653821172,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ2NjY0NDIy","number":363,"title":"Adding support for generic multi dimensional tensors and auxillary image data for multimodal datasets","user":{"login":"eltoto1219","id":14030663,"node_id":"MDQ6VXNlcjE0MDMwNjYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14030663?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eltoto1219","html_url":"https:\/\/github.com\/eltoto1219","followers_url":"https:\/\/api.github.com\/users\/eltoto1219\/followers","following_url":"https:\/\/api.github.com\/users\/eltoto1219\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eltoto1219\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eltoto1219\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eltoto1219\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eltoto1219\/orgs","repos_url":"https:\/\/api.github.com\/users\/eltoto1219\/repos","events_url":"https:\/\/api.github.com\/users\/eltoto1219\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eltoto1219\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":23,"created_at":"2020-07-09T07:10:30Z","updated_at":"2020-08-24T09:59:35Z","closed_at":"2020-08-24T09:59:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/363","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/363","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/363.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/363.patch","merged_at":"2020-08-24T09:59:35Z"},"body":"nlp\/features.py:\r\n\r\nThe main factory class is MultiArray, every single time this class is called, a corresponding pyarrow extension array and type class is generated (and added to the list of globals for future use) for a given root data type and set of dimensions\/shape. I provide examples on working with this in datasets\/lxmert_pretraining_beta\/test_multi_array.py\r\n\r\nsrc\/nlp\/arrow_writer.py\r\n\r\nI had to add a method for writing batches that include extension array types because despite having a unique class for each multidimensional array shape, pyarrow is unable to write any other \"array-like\" data class to a batch object unless it is of the type pyarrow.ExtensionType. 
The problem in this is that when writing multiple batches, the order of the schema and data to be written get mixed up (where the pyarrow datatype in the schema only refers to as ExtensionAray, but each ExtensionArray subclass has a different shape) ... possibly I am missing something here and would be grateful if anyone else could take a look!\r\n\r\ndatasets\/lxmert_pretraining_beta\/lxmert_pretraining_beta.py & datasets\/lxmert_pretraining_beta\/to_arrow_data.py:\r\n\r\nI have begun adding the data from the original LXMERT paper (https:\/\/arxiv.org\/abs\/1908.07490) hosted here: (https:\/\/github.com\/airsplay\/lxmert). The reason I am not pulling from the source of truth for each individual dataset is because it seems that there will also need to be functionality to aggregate multimodal datasets to create a pre-training corpus (:sleepy: ). \r\nFor now, this is just being used to test and run edge-cases for the MultiArray feature, so ive labeled it as \"beta_pretraining\"!\r\n\r\n(still working on the pretraining, just wanted to push out the new functionality sooner than later)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/363\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/363\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/362","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/362\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/362\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/362\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/362","id":653766245,"node_id":"MDU6SXNzdWU2NTM3NjYyNDU=","number":362,"title":"[dateset subset missing] xtreme paws-x","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-09T05:04:54Z","updated_at":"2020-07-09T12:38:42Z","closed_at":"2020-07-09T12:38:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I tried nlp.load_dataset('xtreme', 'PAWS-X.es') but get the value error\r\nIt turns out that the subset for Spanish is 
missing\r\nhttps:\/\/github.com\/google-research-datasets\/paws\/tree\/master\/pawsx","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/362\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/362\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/361","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/361\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/361\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/361\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/361","id":653757376,"node_id":"MDU6SXNzdWU2NTM3NTczNzY=","number":361,"title":"\ud83d\udc1b [Metrics] ROUGE is non-deterministic","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-07-09T04:39:37Z","updated_at":"2020-07-20T23:48:37Z","closed_at":"2020-07-20T23:48:37Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"If I run the ROUGE metric 2 times, with same predictions \/ references, the scores are slightly different.\r\n\r\nRefer to [this Colab notebook](https:\/\/colab.research.google.com\/drive\/1wRssNXgb9ldcp4ulwj-hMJn0ywhDOiDy?usp=sharing) for reproducing the problem.\r\n\r\nExample of F-score for ROUGE-1, ROUGE-2, ROUGE-L in 2 differents run :\r\n\r\n> ['0.3350', '0.1470', '0.2329']\r\n['0.3358', '0.1451', '0.2332']\r\n\r\n---\r\n\r\nWhy ROUGE is not deterministic ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/361\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/361\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/360","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/360\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/360\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/360\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/360","id":653687176,"node_id":"MDU6SXNzdWU2NTM2ODcxNzY=","number":360,"title":"[Feature request] Add dataset.ragged_map() function for many-to-many transformations","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-09T01:04:43Z","updated_at":"2020-07-09T19:31:51Z","closed_at":"2020-07-09T19:31:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`dataset.map()` enables one-to-one transformations. Input one example and output one example. This is helpful for tokenizing and cleaning individual lines.\r\n`dataset.filter()` enables one-to-(one-or-none) transformations. Input one example and output either zero\/one example. This is helpful for removing portions from the dataset.\r\nHowever, some dataset transformations are many-to-many. Consider constructing BERT training examples from a dataset of sentences, where you map `[\"a\", \"b\", \"c\"] -> [\"a[SEP]b\", \"a[SEP]c\", \"b[SEP]c\", \"c[SEP]b\", ...]`\r\n\r\nI propose a more general `ragged_map()` method that takes in a batch of examples of length `N` and return a batch of examples `M`. This is different from the `map(batched=True)` method, which takes examples of length `N` and returns a batch of length `N`, processing individual examples in parallel. I don't have a clear vision of how this would be implemented efficiently and lazily, but would love to hear the community's feedback on this.\r\n\r\nMy specific use case is creating an end-to-end ELECTRA data pipeline. I would like to take the raw WikiText data and generate training examples from this using the `ragged_map()` method, then export to TFRecords and train quickly. This would be a reproducible pipeline with no bash scripts. 
Currently I'm relying on scripts like https:\/\/github.com\/google-research\/electra\/blob\/master\/build_pretraining_dataset.py, which are less general.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/360\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/360\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/359","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/359\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/359\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/359\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/359","id":653656279,"node_id":"MDU6SXNzdWU2NTM2NTYyNzk=","number":359,"title":"ArrowBasedBuilder _prepare_split parse_schema breaks on nested structures","user":{"login":"timothyjlaurent","id":2000204,"node_id":"MDQ6VXNlcjIwMDAyMDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2000204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timothyjlaurent","html_url":"https:\/\/github.com\/timothyjlaurent","followers_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/followers","following_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/orgs","repos_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/repos","events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-07-08T23:24:05Z","updated_at":"2020-07-10T14:52:06Z","closed_at":"2020-07-10T14:52:06Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I tried using the Json dataloader to load some JSON lines files. 
but get an exception in the parse_schema function.\r\n\r\n```\r\n---------------------------------------------------------------------------\r\n\r\nValueError Traceback (most recent call last)\r\n\r\n in \r\n 55 from nlp import load_dataset\r\n 56 \r\n---> 57 ds = load_dataset(\"..\/text2struct\/model\/dataset_builder.py\", data_files=rel_datafiles)\r\n 58 \r\n 59 \r\n\r\n~\/.virtualenvs\/inv-text2struct\/lib\/python3.6\/site-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n 522 download_mode=download_mode,\r\n 523 ignore_verifications=ignore_verifications,\r\n--> 524 save_infos=save_infos,\r\n 525 )\r\n 526 \r\n\r\n~\/.virtualenvs\/inv-text2struct\/lib\/python3.6\/site-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n 430 verify_infos = not save_infos and not ignore_verifications\r\n 431 self._download_and_prepare(\r\n--> 432 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 433 )\r\n 434 # Sync info\r\n\r\n~\/.virtualenvs\/inv-text2struct\/lib\/python3.6\/site-packages\/nlp\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 481 try:\r\n 482 # Prepare split will record examples associated to the split\r\n--> 483 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n 484 except OSError:\r\n 485 raise OSError(\"Cannot find data file. \" + (self.manual_download_instructions or \"\"))\r\n\r\n~\/.virtualenvs\/inv-text2struct\/lib\/python3.6\/site-packages\/nlp\/builder.py in _prepare_split(self, split_generator)\r\n 736 schema_dict[field.name] = Value(str(field.type))\r\n 737 \r\n--> 738 parse_schema(writer.schema, features)\r\n 739 self.info.features = Features(features)\r\n 740 \r\n\r\n~\/.virtualenvs\/inv-text2struct\/lib\/python3.6\/site-packages\/nlp\/builder.py in parse_schema(schema, schema_dict)\r\n 734 parse_schema(field.type.value_type, schema_dict[field.name])\r\n 735 else:\r\n--> 736 schema_dict[field.name] = Value(str(field.type))\r\n 737 \r\n 738 parse_schema(writer.schema, features)\r\n\r\n in __init__(self, dtype, id, _type)\r\n\r\n~\/.virtualenvs\/inv-text2struct\/lib\/python3.6\/site-packages\/nlp\/features.py in __post_init__(self)\r\n 55 \r\n 56 def __post_init__(self):\r\n---> 57 self.pa_type = string_to_arrow(self.dtype)\r\n 58 \r\n 59 def __call__(self):\r\n\r\n~\/.virtualenvs\/inv-text2struct\/lib\/python3.6\/site-packages\/nlp\/features.py in string_to_arrow(type_str)\r\n 32 if str(type_str + \"_\") not in pa.__dict__:\r\n 33 raise ValueError(\r\n---> 34 f\"Neither {type_str} nor {type_str + '_'} seems to be a pyarrow data type. \"\r\n 35 f\"Please make sure to use a correct data type, see: \"\r\n 36 f\"https:\/\/arrow.apache.org\/docs\/python\/api\/datatypes.html#factory-functions\"\r\n\r\nValueError: Neither list nor list_ seems to be a pyarrow data type. Please make sure to use a correct data type, see: https:\/\/arrow.apache.org\/docs\/python\/api\/datatypes.html#factory-functions\r\n```\r\n\r\nIf I create the dataset imperatively, using a pyarrow table, the dataset is created correctly. If I override the `_prepare_split` method to avoid calling the validate schema, the dataset can load as well. 
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/359\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/359\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/358","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/358\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/358\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/358\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/358","id":653645121,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ2NTI0NjQ5","number":358,"title":"Starting to add some real doc","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-08T22:53:03Z","updated_at":"2020-07-14T09:58:17Z","closed_at":"2020-07-14T09:58:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/358","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/358","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/358.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/358.patch","merged_at":"2020-07-14T09:58:15Z"},"body":"Adding a lot of documentation for:\r\n- load a dataset\r\n- explore the dataset object\r\n- process data with the dataset\r\n- add a new dataset script\r\n- share a dataset script\r\n- full package reference\r\n\r\nThis version of the doc can be explored here: https:\/\/2219-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/index.html\r\n\r\nAlso:\r\n- fix a bug in `train_test_split`\r\n- update the `csv` script\r\n- add a verbose argument to the dataset processing methods\r\n\r\nStill missing:\r\n- doc for the metrics\r\n- how to directly upload a community provided dataset with the CLI\r\n- clean up more docstrings\r\n- add the `features` argument to `load_dataset` (should be another 
PR)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/358\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/358\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/357","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/357\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/357\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/357\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/357","id":653642292,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ2NTIyMzU2","number":357,"title":"Add hashes to cnn_dailymail","user":{"login":"jbragg","id":2238344,"node_id":"MDQ6VXNlcjIyMzgzNDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2238344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jbragg","html_url":"https:\/\/github.com\/jbragg","followers_url":"https:\/\/api.github.com\/users\/jbragg\/followers","following_url":"https:\/\/api.github.com\/users\/jbragg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jbragg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jbragg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jbragg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jbragg\/orgs","repos_url":"https:\/\/api.github.com\/users\/jbragg\/repos","events_url":"https:\/\/api.github.com\/users\/jbragg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jbragg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-08T22:45:21Z","updated_at":"2020-07-13T14:16:38Z","closed_at":"2020-07-13T14:16:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/357","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/357","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/357.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/357.patch","merged_at":"2020-07-13T14:16:38Z"},"body":"The URL hashes are helpful for comparing results from other sources.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/357\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/357\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/356","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/356\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/356\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/356\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/356","id":653537388,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ2NDM3MDQ5","number":356,"title":"Add text 
dataset","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-08T19:21:53Z","updated_at":"2020-07-10T14:19:03Z","closed_at":"2020-07-10T14:19:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/356","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/356","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/356.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/356.patch","merged_at":"2020-07-10T14:19:03Z"},"body":"Usage:\r\n\r\n```python\r\nfrom nlp import load_dataset\r\ndset = load_dataset(\"text\", data_files=\"\/path\/to\/file.txt\")[\"train\"]\r\n```\r\n\r\n\r\nI created a dummy_data.zip which contains three files: `train.txt`, `test.txt`, `dev.txt`. Each of these contains two lines. 
It passes\r\n\r\n```bash\r\nRUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_dataset_all_configs_text\r\n```\r\n\r\nbut I would like a second set of eyes to ensure I did it right.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/356\/reactions","total_count":6,"+1":2,"-1":0,"laugh":0,"hooray":3,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/356\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/355","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/355\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/355\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/355\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/355","id":653451013,"node_id":"MDU6SXNzdWU2NTM0NTEwMTM=","number":355,"title":"can't load SNLI dataset","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-07-08T16:54:14Z","updated_at":"2020-07-18T05:15:57Z","closed_at":"2020-07-15T07:59:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`nlp` seems to load `snli` from some URL based on nlp.stanford.edu. 
This subdomain is frequently down -- including right now, when I'd like to load `snli` in a Colab notebook, but can't.\r\n\r\nIs there a plan to move these datasets to huggingface servers for a more stable solution?\r\n\r\nBtw, here's the stack trace:\r\n\r\n```\r\nFile \"\/content\/nlp\/src\/nlp\/builder.py\", line 432, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/content\/nlp\/src\/nlp\/builder.py\", line 466, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/content\/nlp\/src\/nlp\/datasets\/snli\/e417f6f2e16254938d977a17ed32f3998f5b23e4fcab0f6eb1d28784f23ea60d\/snli.py\", line 76, in _split_generators\r\n dl_dir = dl_manager.download_and_extract(_DATA_URL)\r\n File \"\/content\/nlp\/src\/nlp\/utils\/download_manager.py\", line 217, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/content\/nlp\/src\/nlp\/utils\/download_manager.py\", line 156, in download\r\n lambda url: cached_path(url, download_config=self._download_config,), url_or_urls,\r\n File \"\/content\/nlp\/src\/nlp\/utils\/py_utils.py\", line 190, in map_nested\r\n return function(data_struct)\r\n File \"\/content\/nlp\/src\/nlp\/utils\/download_manager.py\", line 156, in \r\n lambda url: cached_path(url, download_config=self._download_config,), url_or_urls,\r\n File \"\/content\/nlp\/src\/nlp\/utils\/file_utils.py\", line 198, in cached_path\r\n local_files_only=download_config.local_files_only,\r\n File \"\/content\/nlp\/src\/nlp\/utils\/file_utils.py\", line 356, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/nlp.stanford.edu\/projects\/snli\/snli_1.0.zip\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/355\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/355\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/354","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/354\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/354\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/354\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/354","id":653357617,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ2MjkyMTc4","number":354,"title":"More faiss 
control","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-08T14:45:20Z","updated_at":"2020-07-09T09:54:54Z","closed_at":"2020-07-09T09:54:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/354","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/354","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/354.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/354.patch","merged_at":"2020-07-09T09:54:51Z"},"body":"Allow users to specify a faiss index they created themselves, as sometimes indexes can be composite for examples","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/354\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/354\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/353","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/353\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/353\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/353\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/353","id":653250611,"node_id":"MDU6SXNzdWU2NTMyNTA2MTE=","number":353,"title":"[Dataset requests] New datasets for Text 
Classification","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892884,"node_id":"MDU6TGFiZWwxOTM1ODkyODg0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/help%20wanted","name":"help wanted","color":"008672","default":true,"description":"Extra attention is needed"},{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-07-08T12:17:58Z","updated_at":"2020-10-20T03:41:23Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"We are missing a few datasets for Text Classification which is an important field.\r\n\r\nNamely, it would be really nice to add:\r\n- TREC-6 dataset (see here for instance: https:\/\/pytorchnlp.readthedocs.io\/en\/latest\/source\/torchnlp.datasets.html#torchnlp.datasets.trec_dataset) **[done]**\r\n- Yelp-5\r\n- Movie review (Movie Review (MR) dataset [156]) **[done (same as rotten_tomatoes)]**\r\n- SST (Stanford Sentiment Treebank) **[include in glue]**\r\n- Multi-Perspective Question Answering (MPQA) dataset **[require authentication (indeed manual download)]**\r\n- Amazon. This is a popular corpus of product reviews collected from the Amazon website [159]. It contains labels for both binary classification and multi-class (5-class) classification\r\n- 20 Newsgroups. The 20 Newsgroups dataset **[done]**\r\n- Sogou News dataset **[done]**\r\n- Reuters news. The Reuters-21578 dataset [165] **[done]**\r\n- DBpedia. The DBpedia dataset [170]\r\n- Ohsumed. The Ohsumed collection [171] is a subset of the MEDLINE database\r\n- EUR-Lex. The EUR-Lex dataset\r\n- WOS. The Web Of Science (WOS) dataset **[done]**\r\n- PubMed. PubMed [173]\r\n- TREC-QA. TREC-QA\r\n- Quora. 
The Quora dataset [180]\r\n\r\nAll these datasets are cited in https:\/\/arxiv.org\/abs\/2004.03705","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/353\/reactions","total_count":6,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":3,"rocket":3,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/353\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/352","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/352\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/352\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/352\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/352","id":653128883,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ2MTA1Mjky","number":352,"title":"\ud83d\udc1b[BugFix]fix seqeval","user":{"login":"AlongWY","id":20281571,"node_id":"MDQ6VXNlcjIwMjgxNTcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20281571?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AlongWY","html_url":"https:\/\/github.com\/AlongWY","followers_url":"https:\/\/api.github.com\/users\/AlongWY\/followers","following_url":"https:\/\/api.github.com\/users\/AlongWY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AlongWY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AlongWY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AlongWY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AlongWY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AlongWY\/repos","events_url":"https:\/\/api.github.com\/users\/AlongWY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AlongWY\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-07-08T09:12:12Z","updated_at":"2020-07-16T08:26:46Z","closed_at":"2020-07-16T08:26:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/352","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/352","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/352.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/352.patch","merged_at":"2020-07-16T08:26:46Z"},"body":"Fix seqeval process labels such as 'B', 'B-ARGM-LOC'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/352\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/352\/timeline","performed_via_github_app":null} 
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/351","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/351\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/351\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/351\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/351","id":652424048,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ1NDk0NTE4","number":351,"title":"add pandas dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-07T15:38:07Z","updated_at":"2020-07-08T14:15:16Z","closed_at":"2020-07-08T14:15:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/351","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/351","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/351.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/351.patch","merged_at":"2020-07-08T14:15:15Z"},"body":"Create a dataset from serialized pandas dataframes.\r\nUsage:\r\n```python\r\nfrom nlp import load_dataset\r\ndset = load_dataset(\"pandas\", data_files=\"df.pkl\")[\"train\"]\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/351\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/351\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/350","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/350\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/350\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/350\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/350","id":652398691,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ1NDczODYz","number":350,"title":"add from_pandas and 
from_dict","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-07T15:03:53Z","updated_at":"2020-07-08T14:14:33Z","closed_at":"2020-07-08T14:14:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/350","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/350","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/350.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/350.patch","merged_at":"2020-07-08T14:14:32Z"},"body":"I added two new methods to the `Dataset` class:\r\n- `from_pandas()` to create a dataset from a pandas dataframe\r\n- `from_dict()` to create a dataset from a dictionary (keys = columns)\r\n\r\nIt uses the `pa.Table.from_pandas` and `pa.Table.from_pydict` funcitons to do so.\r\nIt is also possible to specify the features types via `features=...` if there are ambiguities (null\/nan values), otherwise the arrow schema is infered from the data automatically by pyarrow.\r\n\r\nOne question that I have right now:\r\n+ Should we also add a `save()` method that would write the dataset on the disk ? Right now if we create a `Dataset` using those two new methods, the data are kept in RAM. 
Then to reload it we can call the `from_file()` method.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/350\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/350\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/349","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/349\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/349\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/349\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/349","id":652231571,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ1MzQwMTQ1","number":349,"title":"Hyperpartisan news detection","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-07T11:06:37Z","updated_at":"2020-07-07T20:47:27Z","closed_at":"2020-07-07T14:57:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/349","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/349","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/349.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/349.patch","merged_at":"2020-07-07T14:57:11Z"},"body":"Adding the hyperpartisan news detection dataset from PAN. This contains news article text, labelled with whether they're hyper-partisan and why kinds of biases they display.\r\n\r\nImplementation notes:\r\n- As with many PAN tasks, the data is hosted on [Zenodo](https:\/\/zenodo.org\/record\/1489920) and must be requested before use. I've used the manual download stuff for this, although the dataset is provided under a Creative Commons Attribution 4.0 International License, so we could host a version if we wanted to?\r\n- The 'bias' attribute doesn't exist for the 'byarticle' configuration. I've added an empty string to the class labels to deal with this. 
Is there a more standard value for empty data?\r\n- Should we always subclass `nlp.BuilderConfig`?\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/349\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/349\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/348","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/348\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/348\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/348\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/348","id":652158308,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ1MjgwNjk3","number":348,"title":"Add OSCAR dataset","user":{"login":"pjox","id":635220,"node_id":"MDQ6VXNlcjYzNTIyMA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/635220?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pjox","html_url":"https:\/\/github.com\/pjox","followers_url":"https:\/\/api.github.com\/users\/pjox\/followers","following_url":"https:\/\/api.github.com\/users\/pjox\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pjox\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pjox\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pjox\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pjox\/orgs","repos_url":"https:\/\/api.github.com\/users\/pjox\/repos","events_url":"https:\/\/api.github.com\/users\/pjox\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pjox\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":20,"created_at":"2020-07-07T09:22:07Z","updated_at":"2021-05-03T22:07:08Z","closed_at":"2021-02-09T10:19:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/348","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/348","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/348.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/348.patch","merged_at":null},"body":"I don't know if tests pass, when I run them it tries to download the whole corpus which is around 3.5TB compressed and I don't have that kind of space. 
I'll really need some help with it \ud83d\ude05 \r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/348\/reactions","total_count":4,"+1":0,"-1":0,"laugh":0,"hooray":4,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/348\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/347","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/347\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/347\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/347\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/347","id":652106567,"node_id":"MDU6SXNzdWU2NTIxMDY1Njc=","number":347,"title":"'cp950' codec error from load_dataset('xtreme', 'tydiqa')","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-07-07T08:14:23Z","updated_at":"2020-09-07T14:51:45Z","closed_at":"2020-09-07T14:51:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"![image](https:\/\/user-images.githubusercontent.com\/50871412\/86744744-67481680-c06c-11ea-8612-b77eba92a392.png)\r\n\r\nI guess the error is related to python source encoding issue that my PC is trying to decode the source code with wrong encoding-decoding tools, perhaps :\r\nhttps:\/\/www.python.org\/dev\/peps\/pep-0263\/\r\n\r\nI guess the error was triggered by the code \" module = importlib.import_module(module_path)\" at line 57 in the source code: nlp\/src\/nlp\/load.py \/ (https:\/\/github.com\/huggingface\/nlp\/blob\/911d5596f9b500e39af8642fe3d1b891758999c7\/src\/nlp\/load.py#L51)\r\n\r\nAny ideas?\r\n\r\np.s. 
tried the same code on colab, that runs perfectly\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/347\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/347\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/346","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/346\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/346\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/346\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/346","id":652044151,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ1MTg4MTUz","number":346,"title":"Add emotion dataset","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-07-07T06:35:41Z","updated_at":"2021-04-23T07:13:43Z","closed_at":"2020-07-13T14:39:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/346","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/346","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/346.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/346.patch","merged_at":"2020-07-13T14:39:38Z"},"body":"Hello \ud83e\udd17 team!\r\n\r\nI am trying to add an emotion classification dataset ([link](https:\/\/github.com\/dair-ai\/emotion_dataset)) to `nlp` but I am a bit stuck about what I should do when the URL for the dataset is not a ZIP file, but just a pickled `pandas.DataFrame` (see [here](https:\/\/www.dropbox.com\/s\/607ptdakxuh5i4s\/merged_training.pkl)).\r\n\r\nWith the current implementation, running\r\n\r\n```bash\r\npython nlp-cli test datasets\/emotion --save_infos --all_configs\r\n```\r\n\r\nthrows a `_pickle.UnpicklingError: invalid load key, '<'.` error (full stack trace below). The strange thing is that the path to the file does not carry the `.pkl` extension and instead appears to be some md5 hash (see the `FILE PATH` print statement in the stack trace).\r\n\r\nNote: I have checked that the `merged_training.pkl` file is not corrupted when I download it with `wget`. 
\r\n\r\nAny pointers on what I'm doing wrong would be greatly appreciated!\r\n\r\n**Stack trace**\r\n\r\n```\r\nINFO:nlp.load:Checking datasets\/emotion\/emotion.py for additional imports.\r\nINFO:filelock:Lock 140330435928512 acquired on datasets\/emotion\/emotion.py.lock\r\nINFO:nlp.load:Found main folder for dataset datasets\/emotion\/emotion.py at \/Users\/lewtun\/git\/nlp\/src\/nlp\/datasets\/emotion\r\nINFO:nlp.load:Creating specific version folder for dataset datasets\/emotion\/emotion.py at \/Users\/lewtun\/git\/nlp\/src\/nlp\/datasets\/emotion\/59666994754d1b369228a749b695e377643d141fa98c6972be00407659788c7b\r\nINFO:nlp.load:Copying script file from datasets\/emotion\/emotion.py to \/Users\/lewtun\/git\/nlp\/src\/nlp\/datasets\/emotion\/59666994754d1b369228a749b695e377643d141fa98c6972be00407659788c7b\/emotion.py\r\nINFO:nlp.load:Couldn't find dataset infos file at datasets\/emotion\/dataset_infos.json\r\nINFO:nlp.load:Creating metadata file for dataset datasets\/emotion\/emotion.py at \/Users\/lewtun\/git\/nlp\/src\/nlp\/datasets\/emotion\/59666994754d1b369228a749b695e377643d141fa98c6972be00407659788c7b\/emotion.json\r\nINFO:filelock:Lock 140330435928512 released on datasets\/emotion\/emotion.py.lock\r\nINFO:nlp.builder:Generating dataset emotion (\/Users\/lewtun\/.cache\/huggingface\/datasets\/emotion\/emotion\/1.0.0)\r\nINFO:nlp.builder:Dataset not on Hf google storage. Downloading and preparing it from source\r\nDownloading and preparing dataset emotion\/emotion (download: Unknown size, generated: Unknown size, total: Unknown size) to \/Users\/lewtun\/.cache\/huggingface\/datasets\/emotion\/emotion\/1.0.0...\r\nINFO:nlp.builder:Generating split train\r\n0 examples [00:00, ? examples\/s]FILE PATH \/Users\/lewtun\/.cache\/huggingface\/datasets\/3615dcb52b7ba052ef63e1571894c4b67e8e12a6ab1ef2f756ec3c380bf48490\r\nTraceback (most recent call last):\r\n File \"nlp-cli\", line 37, in \r\n service.run()\r\n File \"\/Users\/lewtun\/git\/nlp\/src\/nlp\/commands\/test.py\", line 83, in run\r\n builder.download_and_prepare(\r\n File \"\/Users\/lewtun\/git\/nlp\/src\/nlp\/builder.py\", line 431, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/Users\/lewtun\/git\/nlp\/src\/nlp\/builder.py\", line 483, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/Users\/lewtun\/git\/nlp\/src\/nlp\/builder.py\", line 664, in _prepare_split\r\n for key, record in utils.tqdm(generator, unit=\" examples\", total=split_info.num_examples, leave=False):\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/nlp\/lib\/python3.8\/site-packages\/tqdm\/std.py\", line 1129, in __iter__\r\n for obj in iterable:\r\n File \"\/Users\/lewtun\/git\/nlp\/src\/nlp\/datasets\/emotion\/59666994754d1b369228a749b695e377643d141fa98c6972be00407659788c7b\/emotion.py\", line 87, in _generate_examples\r\n data = pickle.load(f)\r\n_pickle.UnpicklingError: invalid load key, '<'.\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/346\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/346\/timeline","performed_via_github_app":null} 
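The `from_pandas()` constructor described in PR 350 above is also the most direct way around the pickled-DataFrame situation in the emotion-dataset report just before this point: download the pickle once, load it locally, then wrap it. The snippet below is only a minimal sketch under that assumption; the file name, the direct-download remark, and the printed fields are placeholders for illustration, not part of either thread.

```python
import pickle

import pandas as pd
from datasets import Dataset  # the library discussed above was still published as `nlp` at the time

# Load a local copy of the pickled DataFrame. Fetching a Dropbox *page* URL returns
# an HTML document, which is the usual cause of pickle's "invalid load key, '<'" error;
# a direct-download link (or a pre-downloaded file, as assumed here) avoids it.
with open("merged_training.pkl", "rb") as f:  # placeholder path
    df: pd.DataFrame = pickle.load(f)

# Wrap the DataFrame using the constructor added in PR 350. The Arrow schema is
# inferred by pyarrow; pass features=... to pin down ambiguous (null/NaN) columns.
ds = Dataset.from_pandas(df)
print(ds.column_names, ds.num_rows)
```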
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/345","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/345\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/345\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/345\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/345","id":651761201,"node_id":"MDU6SXNzdWU2NTE3NjEyMDE=","number":345,"title":"Supporting documents in ELI5","user":{"login":"saverymax","id":29262273,"node_id":"MDQ6VXNlcjI5MjYyMjcz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29262273?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/saverymax","html_url":"https:\/\/github.com\/saverymax","followers_url":"https:\/\/api.github.com\/users\/saverymax\/followers","following_url":"https:\/\/api.github.com\/users\/saverymax\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/saverymax\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/saverymax\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/saverymax\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/saverymax\/orgs","repos_url":"https:\/\/api.github.com\/users\/saverymax\/repos","events_url":"https:\/\/api.github.com\/users\/saverymax\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/saverymax\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-06T19:14:13Z","updated_at":"2020-10-27T15:38:45Z","closed_at":"2020-10-27T15:38:45Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I was attempting to use the ELI5 dataset, when I realized that huggingface does not provide the supporting documents (the source documents from the common crawl). Without the supporting documents, this makes the dataset about as useful for my project as a block of cheese, or some other more apt metaphor. According to facebook, the entire document collection is quite large. However, it would still be helpful to at least include a subset of the supporting documents i.e., having some data is better than having a block of cheese, in my case at least.\r\n\r\nIf you choose not to include them, it would be helpful to have documentation mentioning this specifically. 
It is especially confusing because the hf nlp ELI5 dataset has the key `'document'` but there are no documents to be found :(","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/345\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/345\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/344","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/344\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/344\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/344\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/344","id":651495246,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ0NzQwMTIw","number":344,"title":"Search qa","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-06T12:23:16Z","updated_at":"2020-07-16T08:58:16Z","closed_at":"2020-07-16T08:58:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/344","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/344","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/344.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/344.patch","merged_at":null},"body":"This PR adds the Search QA dataset used in **SearchQA: A New Q&A Dataset Augmented with Context from a Search Engine**. 
The dataset has the following config names:\r\n\r\n- raw_jeopardy: raw data\r\n\r\n- train_test_val: the split version\r\n\r\n#336 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/344\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/344\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/343","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/343\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/343\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/343\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/343","id":651419630,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ0Njc4NDEw","number":343,"title":"Fix nested tensorflow format","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-06T10:13:45Z","updated_at":"2020-07-06T13:11:52Z","closed_at":"2020-07-06T13:11:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/343","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/343","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/343.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/343.patch","merged_at":"2020-07-06T13:11:51Z"},"body":"In #339 and #337 we are thinking about adding a way to export datasets to tfrecords.\r\n\r\nHowever I noticed that it was not possible to do `dset.set_format(\"tensorflow\")` on datasets with nested features like `squad`. 
I fixed that using nested map operations to convert features to `tf.ragged.constant`.\r\n\r\nI also added tests on the `set_format` function.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/343\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/343\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/342","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/342\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/342\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/342\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/342","id":651333194,"node_id":"MDU6SXNzdWU2NTEzMzMxOTQ=","number":342,"title":"Features should be updated when `map()` changes schema","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-06T08:03:23Z","updated_at":"2020-07-23T10:15:16Z","closed_at":"2020-07-23T10:15:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`dataset.map()` can change the schema and column names.\r\n\r\nWe should update the features in this case (with what is possible to infer).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/342\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/342\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/341","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/341\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/341\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/341\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/341","id":650611969,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ0MDcwMjEx","number":341,"title":"add fever 
dataset","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-03T13:53:07Z","updated_at":"2020-07-06T13:03:48Z","closed_at":"2020-07-06T13:03:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/341","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/341","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/341.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/341.patch","merged_at":"2020-07-06T13:03:47Z"},"body":"This PR add the FEVER dataset https:\/\/fever.ai\/ used in with the paper: FEVER: a large-scale dataset for Fact Extraction and VERification (https:\/\/arxiv.org\/pdf\/1803.05355.pdf).\r\n#336 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/341\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/341\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/340","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/340\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/340\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/340\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/340","id":650533920,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQ0MDA2Nzcy","number":340,"title":"Update 
cfq.py","user":{"login":"brainshawn","id":4437290,"node_id":"MDQ6VXNlcjQ0MzcyOTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4437290?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/brainshawn","html_url":"https:\/\/github.com\/brainshawn","followers_url":"https:\/\/api.github.com\/users\/brainshawn\/followers","following_url":"https:\/\/api.github.com\/users\/brainshawn\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/brainshawn\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/brainshawn\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/brainshawn\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/brainshawn\/orgs","repos_url":"https:\/\/api.github.com\/users\/brainshawn\/repos","events_url":"https:\/\/api.github.com\/users\/brainshawn\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/brainshawn\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-03T11:23:19Z","updated_at":"2020-07-03T12:33:50Z","closed_at":"2020-07-03T12:33:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/340","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/340","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/340.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/340.patch","merged_at":"2020-07-03T12:33:50Z"},"body":"Make the dataset name consistent with in the paper: Compositional Freebase Question => Compositional Freebase Questions.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/340\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/340\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/339","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/339\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/339\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/339\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/339","id":650156468,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQzNzAyNTcw","number":339,"title":"Add dataset.export() to 
TFRecords","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":18,"created_at":"2020-07-02T19:26:27Z","updated_at":"2020-07-22T09:16:12Z","closed_at":"2020-07-22T09:16:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/339","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/339","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/339.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/339.patch","merged_at":"2020-07-22T09:16:11Z"},"body":"Fixes https:\/\/github.com\/huggingface\/nlp\/issues\/337\r\n\r\nSome design decisions:\r\n\r\n- Simplified the function API to not handle sharding. It writes the entire dataset as a single TFRecord file. This simplifies the function logic and users can use other functions (`select`, `shard`, etc) to handle custom sharding or splitting.\r\n- Use `from_generator()` instead of `from_tensor_slices()` to address the memory issues discussed in https:\/\/github.com\/huggingface\/nlp\/issues\/315 and https:\/\/github.com\/huggingface\/nlp\/issues\/193.\r\n- Performs introspection using the values from `dataset.set_format()` to identify the TF datatypes. Currently it supports string, float, and int. If this should be extended for other datatypes, let me know.\r\n- There are quite a few helper functions required within the `export()` method. If these are better placed in a utils file somewhere, let me know.\r\n\r\nAlso, I noticed that \r\n```python\r\ndataset = dataset.select(indices)\r\ndataset.set_format(\"tensorflow\")\r\n# dataset._format_type is \"tensorflow\"\r\n```\r\ngives a different output than\r\n```python\r\ndataset.set_format(\"tensorflow\")\r\ndataset = dataset.select(indices)\r\n# dataset._format_type is None\r\n```\r\nThe latter loses the format of its parent dataset. 
Is there interest in making `set_format` a functional method that returns itself (can be chained), and that derived datasets maintain the format of their parent?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/339\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":3,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/339\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/338","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/338\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/338\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/338\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/338","id":650057253,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQzNjIxMTEx","number":338,"title":"Run `make style`","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-02T16:19:47Z","updated_at":"2020-07-02T18:03:10Z","closed_at":"2020-07-02T18:03:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/338","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/338","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/338.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/338.patch","merged_at":"2020-07-02T18:03:10Z"},"body":"These files get changed when I run `make style` on an unrelated PR. 
Upstreaming these changes so development on a different branch can be easier.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/338\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/338\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/337","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/337\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/337\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/337\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/337","id":650035887,"node_id":"MDU6SXNzdWU2NTAwMzU4ODc=","number":337,"title":"[Feature request] Export Arrow dataset to TFRecords","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-07-02T15:47:12Z","updated_at":"2020-07-22T09:16:12Z","closed_at":"2020-07-22T09:16:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The TFRecord generation process is error-prone and requires complex separate Python scripts to download and preprocess the data. I propose to combine the user-friendly features of `nlp` with the speed and efficiency of TFRecords. Sample API:\r\n\r\n```python\r\n# use these existing methods\r\nds = load_dataset(\"wikitext\", \"wikitext-2-raw-v1\", split=\"train\")\r\nds = ds.map(lambda ex: tokenizer(ex))\r\nds.set_format(\"tensorflow\", columns=[\"input_ids\", \"token_type_ids\", \"attention_mask\"])\r\n# then add this method\r\nds.export(folder=\"\/my\/tfrecords\", prefix=\"myrecord\", num_shards=8, format=\"tfrecord\")\r\n```\r\nwhich would create files like so:\r\n```bash\r\n\/my\/tfrecords\/myrecord_1.tfrecord\r\n\/my\/tfrecords\/myrecord_2.tfrecord\r\n...\r\n```\r\n\r\nI would be happy to contribute this method. We could use a similar approach for PyTorch. 
Thoughts?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/337\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/337\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/336","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/336\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/336\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/336\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/336","id":649914203,"node_id":"MDU6SXNzdWU2NDk5MTQyMDM=","number":336,"title":"[Dataset requests] New datasets for Open Question Answering","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892884,"node_id":"MDU6TGFiZWwxOTM1ODkyODg0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/help%20wanted","name":"help wanted","color":"008672","default":true,"description":"Extra attention is needed"},{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2020-07-02T13:03:03Z","updated_at":"2020-07-16T09:04:22Z","closed_at":"2020-07-16T09:04:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"We are still a few datasets missing for Open-Question Answering which is currently a field in strong development.\r\n\r\nNamely, it would be really nice to add:\r\n- WebQuestions (Berant et al., 2013) [done]\r\n- CuratedTrec (Baudis et al. 2015) [not open-source]\r\n- MS-MARCO (NGuyen et al. 2016) [done]\r\n- SearchQA (Dunn et al. 2017) [done]\r\n- FEVER (Thorne et al. 
2018) - [ done]\r\n\r\n \r\n\r\nAll these datasets are cited in http:\/\/arxiv.org\/abs\/2005.11401","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/336\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/336\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/335","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/335\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/335\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/335\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/335","id":649765179,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQzMzgwMjI1","number":335,"title":"BioMRC Dataset presented in BioNLP 2020 ACL Workshop","user":{"login":"PetrosStav","id":15162021,"node_id":"MDQ6VXNlcjE1MTYyMDIx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15162021?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PetrosStav","html_url":"https:\/\/github.com\/PetrosStav","followers_url":"https:\/\/api.github.com\/users\/PetrosStav\/followers","following_url":"https:\/\/api.github.com\/users\/PetrosStav\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PetrosStav\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PetrosStav\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PetrosStav\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PetrosStav\/orgs","repos_url":"https:\/\/api.github.com\/users\/PetrosStav\/repos","events_url":"https:\/\/api.github.com\/users\/PetrosStav\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PetrosStav\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-02T09:03:41Z","updated_at":"2020-07-15T08:02:07Z","closed_at":"2020-07-15T08:02:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/335","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/335","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/335.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/335.patch","merged_at":"2020-07-15T08:02:07Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/335\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/335\/timeline","performed_via_github_app":null} 
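The TFRecord export proposal quoted a little earlier (#337, implemented in #339) is easier to follow with a concrete picture of what a single-file export involves. The sketch below is only an illustration of that idea using plain TensorFlow record-writing APIs, not the helper actually added in the PR; the feature names and values are invented for the example.

```python
import tensorflow as tf


def int64_feature(values):
    # Wrap a list of Python ints as a tf.train.Feature of int64s.
    return tf.train.Feature(int64_list=tf.train.Int64List(value=list(values)))


def write_tfrecord(examples, path):
    # `examples` is an iterable of dicts such as {"input_ids": [...], "attention_mask": [...]}.
    # Everything goes into one file, mirroring the single-file design discussed in #339;
    # sharding/splitting stays on the dataset side (e.g. via select()/shard()).
    with tf.io.TFRecordWriter(path) as writer:
        for ex in examples:
            feature = {name: int64_feature(values) for name, values in ex.items()}
            record = tf.train.Example(features=tf.train.Features(feature=feature))
            writer.write(record.SerializeToString())


# Invented example values, just to show the call shape.
write_tfrecord(
    [{"input_ids": [101, 2023, 102], "attention_mask": [1, 1, 1]}],
    "myrecord_1.tfrecord",
)
```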
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/334","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/334\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/334\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/334\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/334","id":649661791,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQzMjk1NjQ0","number":334,"title":"Add dataset.shard() method","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-07-02T06:05:19Z","updated_at":"2020-07-06T12:35:36Z","closed_at":"2020-07-06T12:35:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/334","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/334","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/334.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/334.patch","merged_at":"2020-07-06T12:35:36Z"},"body":"Fixes https:\/\/github.com\/huggingface\/nlp\/issues\/312","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/334\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/334\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/333","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/333\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/333\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/333\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/333","id":649236516,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQyOTE1NDQ0","number":333,"title":"fix variable name 
typo","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-01T19:13:50Z","updated_at":"2020-07-24T15:43:31Z","closed_at":"2020-07-24T08:32:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/333","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/333","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/333.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/333.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/333\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/333\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/332","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/332\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/332\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/332\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/332","id":649140135,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQyODMwMzMz","number":332,"title":"Add 
wiki_dpr","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-07-01T17:12:00Z","updated_at":"2020-07-06T12:21:17Z","closed_at":"2020-07-06T12:21:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/332","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/332","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/332.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/332.patch","merged_at":"2020-07-06T12:21:16Z"},"body":"Presented in the [Dense Passage Retrieval paper](https:\/\/arxiv.org\/pdf\/2004.04906.pdf), this dataset consists in 21M passages from the english wikipedia along with their 768-dim embeddings computed using DPR's context encoder.\r\n\r\nNote on the implementation:\r\n- There are two configs: with and without the embeddings (73GB vs 14GB)\r\n- I used a non-fixed-size sequence of floats to describe the feature format of the embeddings. 
I wanted to use fixed-size sequences but I had issues with reading the arrow file afterwards (for example `dataset[0]` was crashing)\r\n- I added the case for lists of urls as input of the download_manager","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/332\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/332\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/331","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/331\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/331\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/331\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/331","id":648533199,"node_id":"MDU6SXNzdWU2NDg1MzMxOTk=","number":331,"title":"Loading CNN\/Daily Mail dataset produces `nlp.utils.info_utils.NonMatchingSplitsSizesError`","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-06-30T22:21:33Z","updated_at":"2020-07-09T13:03:40Z","closed_at":"2020-07-09T13:03:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\n>>> import nlp\r\n>>> nlp.load_dataset('cnn_dailymail', '3.0.0')\r\nDownloading and preparing dataset cnn_dailymail\/3.0.0 (download: 558.32 MiB, generated: 1.26 GiB, total: 1.81 GiB) to \/u\/jm8wx\/.cache\/huggingface\/datasets\/cnn_dailymail\/3.0.0\/3.0.0...\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/p\/qdata\/jm8wx\/datasets\/nlp\/src\/nlp\/load.py\", line 520, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/p\/qdata\/jm8wx\/datasets\/nlp\/src\/nlp\/builder.py\", line 431, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/p\/qdata\/jm8wx\/datasets\/nlp\/src\/nlp\/builder.py\", line 488, in _download_and_prepare\r\n verify_splits(self.info.splits, split_dict)\r\n File 
\"\/p\/qdata\/jm8wx\/datasets\/nlp\/src\/nlp\/utils\/info_utils.py\", line 70, in verify_splits\r\n raise NonMatchingSplitsSizesError(str(bad_splits))\r\nnlp.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='test', num_bytes=49424491, num_examples=11490, dataset_name='cnn_dailymail'), 'recorded': SplitInfo(name='test', num_bytes=48931393, num_examples=11379, dataset_name='cnn_dailymail')}, {'expected': SplitInfo(name='train', num_bytes=1249178681, num_examples=287113, dataset_name='cnn_dailymail'), 'recorded': SplitInfo(name='train', num_bytes=1240618482, num_examples=285161, dataset_name='cnn_dailymail')}, {'expected': SplitInfo(name='validation', num_bytes=57149241, num_examples=13368, dataset_name='cnn_dailymail'), 'recorded': SplitInfo(name='validation', num_bytes=56637485, num_examples=13255, dataset_name='cnn_dailymail')}]\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/331\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/331\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/330","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/330\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/330\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/330\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/330","id":648525720,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQyMzIxMjEw","number":330,"title":"Doc red","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-30T22:05:31Z","updated_at":"2020-07-06T12:10:39Z","closed_at":"2020-07-05T12:27:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/330","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/330","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/330.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/330.patch","merged_at":"2020-07-05T12:27:29Z"},"body":"Adding [DocRED](https:\/\/github.com\/thunlp\/DocRED) - a relation extraction dataset which tests 
document-level RE. A few implementation notes:\r\n\r\n- There are 2 separate versions of the training set - *annotated* and *distant*. Instead of `nlp.Split.Train` I've used the splits `\"train_annotated\"` and `\"train_distant\"` to reflect this.\r\n- As well as the relation id, the full relation name is mapped from `rel_info.json`\r\n- I renamed the 'h', 'r', 't' keys to 'head', 'relation' and 'tail' to make them more readable.\r\n- Used the fix from #319 to allow nested sequences of dicts.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/330\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/330\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/329","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/329\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/329\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/329\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/329","id":648446979,"node_id":"MDU6SXNzdWU2NDg0NDY5Nzk=","number":329,"title":"[Bug] FileLock dependency incompatible with filesystem","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-06-30T19:45:31Z","updated_at":"2020-07-01T06:55:58Z","closed_at":"2020-06-30T21:33:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm downloading a dataset successfully with\r\n`load_dataset(\"wikitext\", \"wikitext-2-raw-v1\")`\r\n\r\nBut when I attempt to cache it on an external volume, it hangs indefinitely:\r\n`load_dataset(\"wikitext\", \"wikitext-2-raw-v1\", cache_dir=\"\/fsx\") # \/fsx is an external volume mount`\r\n\r\nThe filesystem when hanging looks like this:\r\n```bash\r\n\/fsx\r\n----downloads\r\n ----94be...73.lock\r\n----wikitext\r\n ----wikitext-2-raw\r\n ----wikitext-2-raw-1.0.0.incomplete\r\n```\r\n\r\nIt appears that on this filesystem, the FileLock object is forever stuck in its \"acquire\" stage. 
I have verified that the issue lies specifically with the `filelock` dependency:\r\n```python\r\nopen(\"\/fsx\/hello.txt\").write(\"hello\") # succeeds\r\n\r\nfrom filelock import FileLock\r\nwith FileLock(\"\/fsx\/hello.lock\"):\r\n open(\"\/fsx\/hello.txt\").write(\"hello\") # hangs indefinitely\r\n```\r\n\r\nHas anyone else run into this issue? I'd raise it directly on the FileLock repo, but that project appears abandoned with the last update over a year ago. Or if there's a solution that would remove the FileLock dependency from the project, I would appreciate that.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/329\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/329\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/328","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/328\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/328\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/328\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/328","id":648326841,"node_id":"MDU6SXNzdWU2NDgzMjY4NDE=","number":328,"title":"Fork dataset","user":{"login":"timothyjlaurent","id":2000204,"node_id":"MDQ6VXNlcjIwMDAyMDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2000204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timothyjlaurent","html_url":"https:\/\/github.com\/timothyjlaurent","followers_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/followers","following_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/orgs","repos_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/repos","events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-06-30T16:42:53Z","updated_at":"2020-07-06T21:43:59Z","closed_at":"2020-07-06T21:43:59Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"We have a multi-task learning model training I'm trying to convert to using the Arrow-based nlp dataset. \r\n\r\nWe're currently training a custom TensorFlow model but the nlp paradigm should be a bridge for us to be able to use the wealth of pre-trained models in Transformers.\r\n\r\nOur preprocessing flow parses raw text and json with Entity and Relations annotations and creates 2 datasets for training a NER and Relations prediction heads.\r\n\r\nIs there some good way to \"fork\" dataset-\r\n\r\nEG\r\n\r\n1. text + json -> Dataset1\r\n1. Dataset1 -> DatasetNER\r\n1. Dataset1 -> DatasetREL\r\n\r\nor \r\n\r\n1. text + json -> Dataset1\r\n1. Dataset1 -> DatasetNER\r\n1. 
Dataset1 + DatasetNER -> DatasetREL\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/328\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/328\/timeline","performed_via_github_app":null} {"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/327","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/327\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/327\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/327\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/327","id":648312858,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQyMTQyOTQw","number":327,"title":"set seed for suffling tests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-30T16:21:34Z","updated_at":"2020-07-02T08:34:05Z","closed_at":"2020-07-02T08:34:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/327","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/327","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/327.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/327.patch","merged_at":"2020-07-02T08:34:04Z"},"body":"Some tests were randomly failing because of a missing seed in a test for `train_test_split(shuffle=True)`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/327\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/327\/timeline","performed_via_github_app":null} 
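For the dataset-"forking" question in #328 above, a minimal sketch of one possible approach: build the base Arrow dataset once and derive the task-specific views with `.map()`. Everything here is illustrative, not from the issue: `dataset1` is assumed to be the already-built `nlp.Dataset`, and the two feature-extraction functions are hypothetical user code.

```python
# Sketch only: derive two task-specific datasets from one base nlp.Dataset.
# `dataset1` is assumed to be the Arrow-backed dataset built from text + json;
# extract_ner_features / extract_rel_features are hypothetical user functions
# that return the extra columns needed by each prediction head.
dataset_ner = dataset1.map(lambda example: {"ner_tags": extract_ner_features(example)})
dataset_rel = dataset1.map(lambda example: {"relations": extract_rel_features(example)})
```

Each `.map()` call writes its own cache file, so the two derived datasets can then evolve independently of the base dataset.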
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/326","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/326\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/326\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/326\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/326","id":648126103,"node_id":"MDU6SXNzdWU2NDgxMjYxMDM=","number":326,"title":"Large dataset in Squad2-format","user":{"login":"flozi00","id":47894090,"node_id":"MDQ6VXNlcjQ3ODk0MDkw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47894090?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/flozi00","html_url":"https:\/\/github.com\/flozi00","followers_url":"https:\/\/api.github.com\/users\/flozi00\/followers","following_url":"https:\/\/api.github.com\/users\/flozi00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/flozi00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/flozi00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/flozi00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/flozi00\/orgs","repos_url":"https:\/\/api.github.com\/users\/flozi00\/repos","events_url":"https:\/\/api.github.com\/users\/flozi00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/flozi00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-06-30T12:18:59Z","updated_at":"2020-07-09T09:01:50Z","closed_at":"2020-07-09T09:01:50Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"At the moment we are building an large question answering dataset and think about sharing it with the huggingface community.\r\nCaused the computing power we splitted it into multiple tiles, but they are all in the same format.\r\nRight now the most important facts about are this:\r\n- Contexts: 1.047.671\r\n- questions: 1.677.732\r\n- Answers: 6.742.406\r\n- unanswerable: 377.398\r\n\r\nIt is already cleaned\r\n\r\n
```python\r\ntrain_data = [\r\n    {\r\n        'context': \"this is the context\",\r\n        'qas': [\r\n            {\r\n                'id': \"00002\",\r\n                'is_impossible': False,\r\n                'question': \"whats is this\",\r\n                'answers': [\r\n                    {\r\n                        'text': \"answer\",\r\n                        'answer_start': 0\r\n                    }\r\n                ]\r\n            },\r\n            {\r\n                'id': \"00003\",\r\n                'is_impossible': False,\r\n                'question': \"question2\",\r\n                'answers': [\r\n                    {\r\n                        'text': \"answer2\",\r\n                        'answer_start': 1\r\n                    }\r\n                ]\r\n            }\r\n        ]\r\n    }\r\n]\r\n```\r\n\r\nSince it is growing every day, we are thinking about a structure like this:\r\nwe would host a JSON file containing all the download links, and the script could load it dynamically.\r\nAt the moment the dataset is around ~20GB.\r\n\r\nAny advice on how to handle this, or a ready-to-use template?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/326\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/326\/timeline","performed_via_github_app":null}
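For the structure sketched in #326 above (a hosted JSON index of download links that the loading script reads dynamically), here is a hedged sketch of what the split-generation step of such a script could look like. The index URL, field names and class name are made up; passing a list of URLs to the download manager is the behaviour described in #332.

```python
import json

import nlp


class LargeQA(nlp.GeneratorBasedBuilder):
    """Fragmentary sketch: only the split-generation step is shown."""

    def _split_generators(self, dl_manager):
        # Hypothetical index file listing the current tile URLs per split.
        index_path = dl_manager.download_and_extract("https://example.com/qa_index.json")
        with open(index_path) as f:
            index = json.load(f)
        # The download manager accepts a list of URLs and returns a list of local paths.
        train_files = dl_manager.download(index["train"])
        return [
            nlp.SplitGenerator(name=nlp.Split.TRAIN, gen_kwargs={"filepaths": train_files}),
        ]
```

Because the index file is re-downloaded each time, new tiles can be added to the hosted JSON without changing the loading script itself.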
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/325","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/325\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/325\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/325\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/325","id":647601592,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQxNTk3NTgw","number":325,"title":"Add SQuADShifts dataset","user":{"login":"millerjohnp","id":8953195,"node_id":"MDQ6VXNlcjg5NTMxOTU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8953195?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/millerjohnp","html_url":"https:\/\/github.com\/millerjohnp","followers_url":"https:\/\/api.github.com\/users\/millerjohnp\/followers","following_url":"https:\/\/api.github.com\/users\/millerjohnp\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/millerjohnp\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/millerjohnp\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/millerjohnp\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/millerjohnp\/orgs","repos_url":"https:\/\/api.github.com\/users\/millerjohnp\/repos","events_url":"https:\/\/api.github.com\/users\/millerjohnp\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/millerjohnp\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-29T19:11:16Z","updated_at":"2020-06-30T17:07:31Z","closed_at":"2020-06-30T17:07:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/325","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/325","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/325.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/325.patch","merged_at":"2020-06-30T17:07:31Z"},"body":"This PR adds the four new variants of the SQuAD dataset used in [The Effect of Natural Distribution Shift on Question Answering Models](https:\/\/arxiv.org\/abs\/2004.14444) to facilitate evaluating model robustness to distribution shift.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/325\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/325\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/324","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/324\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/324\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/324\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/324","id":647525725,"node_id":"MDU6SXNzdWU2NDc1MjU3MjU=","number":324,"title":"Error when calculating glue score","user":{"login":"D-i-l-r-u-k-s-h-i","id":47185867,"node_id":"MDQ6VXNlcjQ3MTg1ODY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47185867?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/D-i-l-r-u-k-s-h-i","html_url":"https:\/\/github.com\/D-i-l-r-u-k-s-h-i","followers_url":"https:\/\/api.github.com\/users\/D-i-l-r-u-k-s-h-i\/followers","following_url":"https:\/\/api.github.com\/users\/D-i-l-r-u-k-s-h-i\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/D-i-l-r-u-k-s-h-i\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/D-i-l-r-u-k-s-h-i\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/D-i-l-r-u-k-s-h-i\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/D-i-l-r-u-k-s-h-i\/orgs","repos_url":"https:\/\/api.github.com\/users\/D-i-l-r-u-k-s-h-i\/repos","events_url":"https:\/\/api.github.com\/users\/D-i-l-r-u-k-s-h-i\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/D-i-l-r-u-k-s-h-i\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-06-29T16:53:48Z","updated_at":"2020-07-09T09:13:34Z","closed_at":"2020-07-09T09:13:34Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I was trying glue score along with other metrics here. 
But glue gives me this error;\r\n\r\n```\r\nimport nlp\r\nglue_metric = nlp.load_metric('glue',name=\"cola\")\r\n\r\nglue_score = glue_metric.compute(predictions, references)\r\n```\r\n\r\n```\r\n---------------------------------------------------------------------------\r\n---------------------------------------------------------------------------\r\nTypeError                                 Traceback (most recent call last)\r\n in ()\r\n----> 1 glue_score = glue_metric.compute(predictions, references)\r\n\r\n6 frames\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/metric.py in compute(self, predictions, references, timeout, **metrics_kwargs)\r\n    191         \"\"\"\r\n    192         if predictions is not None:\r\n--> 193             self.add_batch(predictions=predictions, references=references)\r\n    194         self.finalize(timeout=timeout)\r\n    195 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/metric.py in add_batch(self, predictions, references, **kwargs)\r\n    207         if self.writer is None:\r\n    208             self._init_writer()\r\n--> 209         self.writer.write_batch(batch)\r\n    210 \r\n    211     def add(self, prediction=None, reference=None, **kwargs):\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/arrow_writer.py in write_batch(self, batch_examples, writer_batch_size)\r\n    155         if self.pa_writer is None:\r\n    156             self._build_writer(pa_table=pa.Table.from_pydict(batch_examples))\r\n--> 157         pa_table: pa.Table = pa.Table.from_pydict(batch_examples, schema=self._schema)\r\n    158         if writer_batch_size is None:\r\n    159             writer_batch_size = self.writer_batch_size\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/pyarrow\/types.pxi in __iter__()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/pyarrow\/array.pxi in pyarrow.lib.asarray()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/pyarrow\/array.pxi in pyarrow.lib.array()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/pyarrow\/array.pxi in pyarrow.lib._sequence_to_array()\r\n\r\nTypeError: an integer is required (got type str)\r\n```\r\nI'm not sure whether I'm doing this wrong or whether it's an issue. I would like to know a workaround. Thank you.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/324\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/324\/timeline","performed_via_github_app":null}
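The traceback in #324 above ends with `TypeError: an integer is required (got type str)`, which suggests the predictions and references were passed as label strings rather than integer class ids. A hedged sketch of a call that should satisfy the CoLA config's integer schema (the example values are made up):

```python
import nlp

glue_metric = nlp.load_metric("glue", name="cola")

# CoLA is a binary classification task, so both lists should hold integer
# class ids (0 or 1), not label strings such as "acceptable"/"unacceptable".
predictions = [0, 1, 1, 0]
references = [0, 1, 0, 0]

glue_score = glue_metric.compute(predictions=predictions, references=references)
print(glue_score)  # e.g. a dict with the Matthews correlation for CoLA
```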
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/323","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/323\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/323\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/323\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/323","id":647521308,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQxNTMxOTY3","number":323,"title":"Add package path to sys when downloading package as github archive","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-29T16:46:01Z","updated_at":"2020-07-30T14:00:23Z","closed_at":"2020-07-30T14:00:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/323","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/323","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/323.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/323.patch","merged_at":null},"body":"This fixes the `coval.py` metric so that imports within the downloaded module work correctly. We can use a similar trick to add the BLEURT metric (@ankparikh)\r\n\r\n@thomwolf not sure how you feel about adding to the `PYTHONPATH` from the script. This is the only way I could make it work with my understanding of `importlib` but there might be a more elegant method.\r\n\r\nThis PR fixes https:\/\/github.com\/huggingface\/nlp\/issues\/305","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/323\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/323\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/322","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/322\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/322\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/322\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/322","id":647483850,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQxNTAyMjc2","number":322,"title":"output nested dict in get_nearest_examples","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-29T15:47:47Z","updated_at":"2020-07-02T08:33:33Z","closed_at":"2020-07-02T08:33:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/322","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/322","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/322.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/322.patch","merged_at":"2020-07-02T08:33:32Z"},"body":"As we are using a columnar format like arrow as the backend for datasets, we expect to have a dictionary of columns when we slice a dataset like in this example:\r\n```python\r\nmy_examples = dataset[0:10]\r\nprint(type(my_examples))\r\n# >>> dict\r\nprint(my_examples[\"my_column\"][0]\r\n# >>> this is the first element of the column 'my_column'\r\n```\r\n\r\nTherefore I wanted to keep this logic when calling `get_nearest_examples` that returns the top 10 nearest examples:\r\n```python\r\ndataset.add_faiss_index(column=\"embeddings\")\r\nscores, examples = dataset.get_nearest_examples(\"embeddings\", query=my_numpy_embedding)\r\nprint(type(examples))\r\n# >>> dict\r\n```\r\n\r\nPreviously it was returning a list[dict]. 
It was the only place that was using this output format.\r\n\r\nTo make it work I had to implement `__getitem__(key)` where `key` is a list.\r\nThis is different from `.select` because `.select` is a dataset transform (it returns a new dataset object) while `__getitem__` is an extraction method (it returns python dictionaries).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/322\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/322\/timeline","performed_via_github_app":null}
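A short sketch of the extraction semantics described in #322 above, with assumed details: `dataset` is taken to be an existing `nlp.Dataset` with a column named "my_column", and list indexing is assumed to behave like slicing now that `__getitem__` accepts a list.

```python
# Assumes `dataset` is an existing nlp.Dataset with a column "my_column".

# Slicing and list-indexing are extraction methods: they return plain python
# dictionaries of columns (one list of values per column).
rows = dataset[[0, 5, 42]]
print(type(rows))            # dict
print(rows["my_column"])     # values of 'my_column' for rows 0, 5 and 42

# .select() is a dataset transform: it returns a new Dataset object instead.
subset = dataset.select([0, 5, 42])
print(type(subset))          # a Dataset, not a dict
```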
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/321","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/321\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/321\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/321\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/321","id":647271526,"node_id":"MDU6SXNzdWU2NDcyNzE1MjY=","number":321,"title":"ERROR:root:mwparserfromhell","user":{"login":"Shiro-LK","id":26505641,"node_id":"MDQ6VXNlcjI2NTA1NjQx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26505641?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Shiro-LK","html_url":"https:\/\/github.com\/Shiro-LK","followers_url":"https:\/\/api.github.com\/users\/Shiro-LK\/followers","following_url":"https:\/\/api.github.com\/users\/Shiro-LK\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Shiro-LK\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Shiro-LK\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Shiro-LK\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Shiro-LK\/orgs","repos_url":"https:\/\/api.github.com\/users\/Shiro-LK\/repos","events_url":"https:\/\/api.github.com\/users\/Shiro-LK\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Shiro-LK\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2020-06-29T11:10:43Z","updated_at":"2020-07-23T16:28:34Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI am trying to download some wikipedia data but I got this error for spanish \"es\" (but there are maybe some others languages which have the same error I haven't tried all of them ).\r\n\r\n`ERROR:root:mwparserfromhell ParseError: This is a bug and should be reported. Info: C tokenizer exited with non-empty token stack.`\r\n\r\nThe code I have use was : \r\n`dataset = load_dataset('wikipedia', '20200501.es', beam_runner='DirectRunner')`\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/321\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/321\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/320","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/320\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/320\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/320\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/320","id":647188167,"node_id":"MDU6SXNzdWU2NDcxODgxNjc=","number":320,"title":"Blog Authorship Corpus, Non Matching Splits Sizes Error, nlp viewer","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-29T07:36:35Z","updated_at":"2020-06-29T14:44:42Z","closed_at":"2020-06-29T14:44:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Selecting `blog_authorship_corpus` in the nlp viewer throws the following error: \r\n\r\n```\r\nNonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=610252351, num_examples=532812, dataset_name='blog_authorship_corpus'), 'recorded': SplitInfo(name='train', num_bytes=614706451, num_examples=535568, dataset_name='blog_authorship_corpus')}, {'expected': SplitInfo(name='validation', num_bytes=37500394, num_examples=31277, dataset_name='blog_authorship_corpus'), 'recorded': SplitInfo(name='validation', num_bytes=32553710, num_examples=28521, dataset_name='blog_authorship_corpus')}]\r\nTraceback:\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/ScriptRunner.py\", line 322, in _run_script\r\n    exec(code, module.__dict__)\r\nFile \"\/home\/sasha\/nlp-viewer\/run.py\", line 172, in \r\n    dts, fail = get(str(option.id), str(conf_option.name) if conf_option else None)\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/caching.py\", line 591, in wrapped_func\r\n    return get_or_create_cached_value()\r\nFile \"\/home\/sasha\/streamlit\/lib\/streamlit\/caching.py\", line 575, in get_or_create_cached_value\r\n    return_value = func(*args, **kwargs)\r\nFile \"\/home\/sasha\/nlp-viewer\/run.py\", line 132, in get\r\n    builder_instance.download_and_prepare()\r\nFile 
\"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 432, in download_and_prepare\r\n    dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 488, in _download_and_prepare\r\n    verify_splits(self.info.splits, split_dict)\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/nlp\/utils\/info_utils.py\", line 70, in verify_splits\r\n    raise NonMatchingSplitsSizesError(str(bad_splits))\r\n```\r\n@srush @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/320\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/320\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/319","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/319\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/319\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/319\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/319","id":646792487,"node_id":"MDU6SXNzdWU2NDY3OTI0ODc=","number":319,"title":"Nested sequences with dicts","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-27T23:45:17Z","updated_at":"2020-07-03T10:22:00Z","closed_at":"2020-07-03T10:22:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Am pretty much finished [adding a dataset](https:\/\/github.com\/ghomasHudson\/nlp\/blob\/DocRED\/datasets\/docred\/docred.py) for [DocRED](https:\/\/github.com\/thunlp\/DocRED), but am getting an error when trying to add a nested `nlp.features.sequence(nlp.features.sequence({key:value,...}))`. \r\n\r\nThe original data is in this format:\r\n```python\r\n{\r\n  'title': \"Title of wiki page\",\r\n  'vertexSet': [\r\n                  [\r\n                    { 'name': \"mention_name\", \r\n                      'sent_id': \"mention in which sentence\", \r\n                      'pos': [\"postion of mention in a sentence\"], \r\n                      'type': \"NER_type\"},\r\n                    {another mention}\r\n                  ], \r\n                  [another entity]\r\n                ]\r\n    ...\r\n}\r\n```\r\nSo to represent this I've attempted to write:\r\n```\r\n...\r\nfeatures=nlp.Features({\r\n    \"title\": nlp.Value(\"string\"),\r\n    \"vertexSet\": nlp.features.Sequence(nlp.features.Sequence({\r\n        \"name\": nlp.Value(\"string\"),\r\n        \"sent_id\": nlp.Value(\"int32\"),\r\n        \"pos\": nlp.features.Sequence(nlp.Value(\"int32\")),\r\n        \"type\": nlp.Value(\"string\"),\r\n    })),\r\n    ...\r\n    }),\r\n...\r\n```\r\nThis is giving me the error:\r\n```\r\npyarrow.lib.ArrowTypeError: Could not convert [{'pos': [[0,2], [2,4], [3,5]], \"type\": [\"ORG\", \"ORG\", \"ORG\"], \"name\": [\"Lark Force\", \"Lark Force\", \"Lark Force\", \"sent_id\": [0, 3, 4]}..... 
with type list: was not a dict, tuple, or recognized null value for conversion to struct type\r\n```\r\nDo we expect the pyarrow stuff to break when doing this deeper nesting? I've checked that it still works when you do `nlp.features.Sequence(nlp.features.Sequence(nlp.Value(\"string\")))` or `nlp.features.Sequence({key:value,...})`, just not nested sequences with a dict.\r\n\r\nIf it's not possible, I can always convert it to a shallower structure. I'd rather not change the DocRED authors' structure if I don't have to though.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/319\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/319\/timeline","performed_via_github_app":null}
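On the "shallower structure" fallback mentioned at the end of #319 above: one possible layout (a sketch only, not the fix that was eventually merged into the library) is to describe each entity as a dict of parallel lists over its mentions, so that only a single `Sequence` wraps the dict:

```python
import nlp

features = nlp.Features({
    "title": nlp.Value("string"),
    # One entry per entity; each field is a list aligned over that entity's mentions.
    "vertexSet": nlp.features.Sequence({
        "name": nlp.features.Sequence(nlp.Value("string")),
        "sent_id": nlp.features.Sequence(nlp.Value("int32")),
        "pos": nlp.features.Sequence(nlp.features.Sequence(nlp.Value("int32"))),
        "type": nlp.features.Sequence(nlp.Value("string")),
    }),
})
```

The trade-off is that the original list-of-mention-dicts shape from the DocRED release has to be transposed into parallel lists in `_generate_examples`.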
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/318","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/318\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/318\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/318\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/318","id":646682840,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQwOTExOTYy","number":318,"title":"Multitask","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":18,"created_at":"2020-06-27T13:27:29Z","updated_at":"2021-12-02T23:24:49Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/318","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/318","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/318.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/318.patch","merged_at":null},"body":"Following our discussion in #217, I've implemented a first working version of `MultiDataset`.\r\n\r\nThere's a function `build_multitask()` which takes either individual `nlp.Dataset`s or `dicts` of splits and constructs `MultiDataset`(s). I've added a notebook with example usage.\r\n\r\nI've implemented many of the `nlp.Dataset` methods (cache_files, columns, nbytes, num_columns, num_rows, column_names, schema, shape). Some of the other methods are complicated as they change the number of examples. These raise `NotImplementedError`s at the moment.\r\n\r\nThis will need some tests which I haven't written yet.\r\n\r\nThere's definitely room for improvements but I think the general approach is sound. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/318\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/318\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/317","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/317\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/317\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/317\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/317","id":646555384,"node_id":"MDU6SXNzdWU2NDY1NTUzODQ=","number":317,"title":"Adding a dataset with multiple subtasks","user":{"login":"erickrf","id":294483,"node_id":"MDQ6VXNlcjI5NDQ4Mw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/294483?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/erickrf","html_url":"https:\/\/github.com\/erickrf","followers_url":"https:\/\/api.github.com\/users\/erickrf\/followers","following_url":"https:\/\/api.github.com\/users\/erickrf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/erickrf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/erickrf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/erickrf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/erickrf\/orgs","repos_url":"https:\/\/api.github.com\/users\/erickrf\/repos","events_url":"https:\/\/api.github.com\/users\/erickrf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/erickrf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-26T23:14:19Z","updated_at":"2020-10-27T15:36:52Z","closed_at":"2020-10-27T15:36:52Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I intent to add the datasets of the MT Quality Estimation shared tasks to `nlp`. However, they have different subtasks -- such as word-level, sentence-level and document-level quality estimation, each of which having different language pairs, and some of the data reused in different subtasks.\r\n\r\nFor example, in [QE 2019,](http:\/\/www.statmt.org\/wmt19\/qe-task.html) we had the same English-Russian and English-German data for word-level and sentence-level QE. \r\n\r\nI suppose these datasets could have both their word and sentence-level labels inside `nlp.Features`; but what about other subtasks? Should they be considered a different dataset altogether?\r\n\r\nI read the discussion on #217 but the case of QE seems a lot simpler.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/317\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/317\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/316","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/316\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/316\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/316\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/316","id":646366450,"node_id":"MDExOlB1bGxSZXF1ZXN0NDQwNjY5NzY5","number":316,"title":"add AG News dataset","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-26T16:11:58Z","updated_at":"2020-06-30T09:58:08Z","closed_at":"2020-06-30T08:31:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/316","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/316","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/316.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/316.patch","merged_at":"2020-06-30T08:31:55Z"},"body":"adds support for the AG-News topic classification dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/316\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/316\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/315","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/315\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/315\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/315\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/315","id":645888943,"node_id":"MDU6SXNzdWU2NDU4ODg5NDM=","number":315,"title":"[Question] Best way to batch a large dataset?","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-06-25T22:30:20Z","updated_at":"2020-10-27T15:38:17Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm training on large datasets such as Wikipedia and BookCorpus. Following the instructions in [the tutorial notebook](https:\/\/colab.research.google.com\/github\/huggingface\/nlp\/blob\/master\/notebooks\/Overview.ipynb), I see the following recommended for TensorFlow:\r\n\r\n```python\r\ntrain_tf_dataset = train_tf_dataset.filter(remove_none_values, load_from_cache_file=False)\r\ncolumns = ['input_ids', 'token_type_ids', 'attention_mask', 'start_positions', 'end_positions']\r\ntrain_tf_dataset.set_format(type='tensorflow', columns=columns)\r\nfeatures = {x: train_tf_dataset[x].to_tensor(default_value=0, shape=[None, tokenizer.max_len]) for x in columns[:3]} \r\nlabels = {\"output_1\": train_tf_dataset[\"start_positions\"].to_tensor(default_value=0, shape=[None, 1])}\r\nlabels[\"output_2\"] = train_tf_dataset[\"end_positions\"].to_tensor(default_value=0, shape=[None, 1])\r\n### Question about this last line ###\r\ntfdataset = tf.data.Dataset.from_tensor_slices((features, labels)).batch(8)\r\n```\r\n\r\nThis code works for something like WikiText-2. However, scaling up to WikiText-103, the last line takes 5-10 minutes to run. I assume it is because tf.data.Dataset.from_tensor_slices() is pulling everything into memory, not lazily loading. 
This approach won't scale up to datasets 25x larger such as Wikipedia.\r\n\r\nSo I tried manual batching using `dataset.select()`:\r\n\r\n```python\r\nidxs = np.random.randint(len(dataset), size=bsz)\r\nbatch = dataset.select(idxs).map(lambda example: {\"input_ids\": tokenizer(example[\"text\"])})\r\ntf_batch = tf.constant(batch[\"ids\"], dtype=tf.int64)\r\n```\r\n\r\nThis appears to create a new Apache Arrow dataset with every batch I grab, and then tries to cache it. The runtime of `dataset.select([0, 1])` appears to be much worse than `dataset[:2]`. So using `select()` doesn't seem to be performant enough for a training loop.\r\n\r\nIs there a performant scalable way to lazily load batches of nlp Datasets?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/315\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/315\/timeline","performed_via_github_app":null}
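One hedged alternative for the lazy-batching question in #315 above is to wrap the dataset in a Python generator and hand it to `tf.data.Dataset.from_generator`, so batches are pulled on demand instead of being materialized up front. This is a sketch rather than advice from the thread, and it assumes the columns are already tokenized and padded to a fixed length; `train_tf_dataset` and the column names come from the snippet quoted in the issue.

```python
import tensorflow as tf

feature_cols = ["input_ids", "token_type_ids", "attention_mask"]


def batch_generator(batch_size=8):
    # Slice the Arrow-backed dataset lazily, one batch at a time.
    for i in range(0, len(train_tf_dataset), batch_size):
        batch = train_tf_dataset[i : i + batch_size]
        features = {name: batch[name] for name in feature_cols}
        labels = {"output_1": batch["start_positions"], "output_2": batch["end_positions"]}
        yield features, labels


tfdataset = tf.data.Dataset.from_generator(
    batch_generator,
    output_types=(
        {name: tf.int64 for name in feature_cols},
        {"output_1": tf.int64, "output_2": tf.int64},
    ),
)
```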
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/314","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/314\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/314\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/314\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/314","id":645461174,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM5OTM4MTMw","number":314,"title":"Fixed singlular very minor spelling error","user":{"login":"SchizoidBat","id":40696362,"node_id":"MDQ6VXNlcjQwNjk2MzYy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40696362?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SchizoidBat","html_url":"https:\/\/github.com\/SchizoidBat","followers_url":"https:\/\/api.github.com\/users\/SchizoidBat\/followers","following_url":"https:\/\/api.github.com\/users\/SchizoidBat\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SchizoidBat\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SchizoidBat\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SchizoidBat\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SchizoidBat\/orgs","repos_url":"https:\/\/api.github.com\/users\/SchizoidBat\/repos","events_url":"https:\/\/api.github.com\/users\/SchizoidBat\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SchizoidBat\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-25T10:45:59Z","updated_at":"2020-06-26T08:46:41Z","closed_at":"2020-06-25T12:43:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/314","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/314","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/314.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/314.patch","merged_at":"2020-06-25T12:43:59Z"},"body":"An instance of \"independantly\" was changed to \"independently\". That's all.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/314\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/314\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/313","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/313\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/313\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/313\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/313","id":645390088,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM5ODc4MDg5","number":313,"title":"Add MWSC","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"http
s:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2020-06-25T09:22:02Z","updated_at":"2020-06-30T08:28:11Z","closed_at":"2020-06-30T08:28:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/313","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/313","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/313.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/313.patch","merged_at":"2020-06-30T08:28:10Z"},"body":"Adding the [Modified Winograd Schema Challenge](https:\/\/github.com\/salesforce\/decaNLP\/blob\/master\/local_data\/schema.txt) dataset which formed part of the [decaNLP](http:\/\/decanlp.com\/) benchmark. 
Not sure how much use people would find for it outside of the benchmark, but it is general purpose.\r\n\r\nCode is heavily borrowed from the [decaNLP repo](https:\/\/github.com\/salesforce\/decaNLP\/blob\/1e9605f246b9e05199b28bde2a2093bc49feeeaa\/text\/torchtext\/datasets\/generic.py#L773-L877).\r\n\r\nThere are a few (possibly overly opinionated) design choices I made:\r\n\r\n- I used the train\/test\/dev split [buried in the decaNLP code](https:\/\/github.com\/salesforce\/decaNLP\/blob\/1e9605f246b9e05199b28bde2a2093bc49feeeaa\/text\/torchtext\/datasets\/generic.py#L852-L855)\r\n- I split out each example into the 2 alternatives. Originally the data uses the format:\r\n    ```\r\n    The city councilmen refused the demonstrators a permit because they [feared\/advocated] violence. \r\n    Who [feared\/advocated] violence? \r\n    councilmen\/demonstrators\r\n    ```\r\n    I split into the 2 variants:\r\n    ```\r\n    The city councilmen refused the demonstrators a permit because they feared violence. \r\n    Who feared violence? \r\n    councilmen\/demonstrators\r\n    \r\n    The city councilmen refused the demonstrators a permit because they advocated violence. \r\n    Who advocated violence? \r\n    councilmen\/demonstrators\r\n    ```\r\n    I can't see any use for having the options combined into a single example (splitting them is [the way decaNLP processes them](https:\/\/github.com\/salesforce\/decaNLP\/blob\/1e9605f246b9e05199b28bde2a2093bc49feeeaa\/text\/torchtext\/datasets\/generic.py#L846-L850)). You can't train on both versions with them combined, and splitting the examples later would be a pain to do. I think [winogrande.py](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/datasets\/winogrande\/winogrande.py) presents the data in this way?\r\n\r\n- I've not used the decaNLP framing (appending the options to the question e.g. `Who feared violence? \r\n -- councilmen or demonstrators?`) but left it more generic by adding the options as a new key: `\"options\":[\"councilmen\",\"demonstrators\"]`. This should be an easy thing to change using `map` if needed by a specific application.\r\n\r\nThe dataset is working as-is, but if anyone has any thoughts\/preferences on the design decisions here I'm definitely open to different choices.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/313\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/313\/timeline","performed_via_github_app":null}
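A hedged sketch of the `map` call the last bullet of the PR above alludes to: rebuilding the decaNLP-style framing from the generic `options` key. The dataset id `mwsc` and the `question`/`options` column names are assumptions for illustration, not a confirmed schema.

```python
import nlp

# Assumed dataset id and column names; adjust to the actual schema.
ds = nlp.load_dataset("mwsc", split="train")

def add_decanlp_framing(example):
    # "Who feared violence?" -> "Who feared violence? -- councilmen or demonstrators?"
    example["question"] = example["question"] + " -- " + " or ".join(example["options"]) + "?"
    return example

ds_decanlp = ds.map(add_decanlp_framing)
```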
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/312","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/312\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/312\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/312\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/312","id":645025561,"node_id":"MDU6SXNzdWU2NDUwMjU1NjE=","number":312,"title":"[Feature request] Add `shard()` method to dataset","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-24T22:48:33Z","updated_at":"2020-07-06T12:35:36Z","closed_at":"2020-07-06T12:35:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, to shard a dataset into 10 pieces on different ranks, you can run\r\n\r\n```python\r\nrank = 3 # for example\r\nsize = 10\r\ndataset = nlp.load_dataset('wikitext', 'wikitext-2-raw-v1', split=f\"train[{rank*10}%:{(rank+1)*10}%]\")\r\n```\r\n\r\nHowever, this breaks down if you have a number of ranks that doesn't divide cleanly into 100, such as 64 ranks. Is there interest in adding a method shard() that looks like this?\r\n\r\n```python\r\nrank = 3\r\nsize = 64\r\ndataset = nlp.load_dataset(\"wikitext\", \"wikitext-2-raw-v1\", split=\"train\").shard(rank=rank, size=size)\r\n```\r\n\r\nTensorFlow has a similar API: https:\/\/www.tensorflow.org\/api_docs\/python\/tf\/data\/Dataset#shard. I'd be happy to contribute this code.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/312\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/312\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/311","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/311\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/311\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/311\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/311","id":645013131,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM5NTQ3OTg0","number":311,"title":"Add qa_zre","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-24T22:17:22Z","updated_at":"2020-06-29T16:37:38Z","closed_at":"2020-06-29T16:37:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/311","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/311","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/311.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/311.patch","merged_at":"2020-06-29T16:37:38Z"},"body":"Adding the QA-ZRE dataset from [\"Zero-Shot Relation Extraction via Reading Comprehension\"](http:\/\/nlp.cs.washington.edu\/zeroshot\/).\r\n\r\nA common processing step seems to be replacing the `XXX` placeholder with the `subject`. I've left this out as it's something you could easily do with `map`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/311\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/311\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/310","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/310\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/310\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/310\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/310","id":644806720,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM5MzY1MDg5","number":310,"title":"add wikisql","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-24T18:00:35Z","updated_at":"2020-06-25T12:32:25Z","closed_at":"2020-06-25T12:32:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/310","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/310","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/310.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/310.patch","merged_at":"2020-06-25T12:32:25Z"},"body":"Adding the [WikiSQL](https:\/\/github.com\/salesforce\/WikiSQL) dataset.\r\n\r\nInteresting things to note:\r\n- Have copied the function (`_convert_to_human_readable`) which converts the SQL query to a human-readable (string) format as this is what most people will want when actually using this dataset for NLP applications.\r\n- `conds` was originally a tuple but is converted to a dictionary to support differing types.\r\n\r\nWould be nice to add the logical_form metrics too at some point.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/310\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/310\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/309","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/309\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/309\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/309\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/309","id":644783822,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM5MzQ1NzYz","number":309,"title":"Add narrative qa","user":{"login":"Varal7","id":8019486,"node_id":"MDQ6VXNlcjgwMTk0ODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8019486?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Varal7","html_url":"https:\/\/github.com\/Varal7","followers_url":"https:\/\/api.github.com\/users\/Varal7\/followers","following_url":"https:\/\/api.github.com\/users\/Varal7\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Varal7\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Varal7\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Varal7\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Varal7\/orgs","repos_url":"https:\/\/api.github.com\/users\/Varal7\/repos","events_url":"https:\/\/api.github.com\/users\/Varal7\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Varal7\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-06-24T17:26:18Z","updated_at":"2020-09-03T09:02:10Z","closed_at":"2020-09-03T09:02:09Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/309","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/309","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/309.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/309.patch","merged_at":null},"body":"Test cases for dummy data don't pass\r\n\r\nOnly contains data for summaries (not whole story)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/309\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/309\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/308","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/308\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/308\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/308\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/308","id":644195251,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM4ODYyMzYy","number":308,"title":"Specify utf-8 encoding for MRPC files","user":{"login":"patpizio","id":15801338,"node_id":"MDQ6VXNlcjE1ODAxMzM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15801338?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patpizio","html_url":"https:\/\/github.com\/patpizio","followers_url":"https:\/\/api.github.com\/users\/patpizio\/followers","following_url":"https:\/\/api.github.com\/users\/patpizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patpizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patpizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patpizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patpizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/patpizio\/repos","events_url":"https:\/\/api.github.com\/users\/patpizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patpizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-23T22:44:36Z","updated_at":"2020-06-25T12:52:21Z","closed_at":"2020-06-25T12:16:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/308","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/308","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/308.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/308.patch","merged_at":"2020-06-25T12:16:09Z"},"body":"Fixes #307, again probably a Windows-related issue.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/308\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/308\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/307","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/307\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/307\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/307\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/307","id":644187262,"node_id":"MDU6SXNzdWU2NDQxODcyNjI=","number":307,"title":"Specify encoding for MRPC","user":{"login":"patpizio","id":15801338,"node_id":"MDQ6VXNlcjE1ODAxMzM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15801338?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patpizio","html_url":"https:\/\/github.com\/patpizio","followers_url":"https:\/\/api.github.com\/users\/patpizio\/followers","following_url":"https:\/\/api.github.com\/users\/patpizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patpizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patpizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patpizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patpizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/patpizio\/repos","events_url":"https:\/\/api.github.com\/users\/patpizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patpizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-23T22:24:49Z","updated_at":"2020-06-25T12:16:09Z","closed_at":"2020-06-25T12:16:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Same as #242, but with MRPC: on Windows, I get a `UnicodeDecodeError` when I try to download the dataset:\r\n```python\r\ndataset = nlp.load_dataset('glue', 'mrpc')\r\n```\r\n\r\n```python\r\nDownloading and preparing dataset glue\/mrpc (download: Unknown size, generated: Unknown size, total: Unknown size) to C:\\Users\\Python\\.cache\\huggingface\\datasets\\glue\\mrpc\\1.0.0...\r\n---------------------------------------------------------------------------\r\nUnicodeDecodeError                        Traceback (most recent call last)\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\builder.py in incomplete_dir(dirname)\r\n    369                 try:\r\n--> 370                     yield tmp_dir\r\n    371                     if os.path.isdir(dirname):\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n    430                 verify_infos = not save_infos and not ignore_verifications\r\n--> 431                 self._download_and_prepare(\r\n    432                     dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n    482                 # Prepare split will record examples associated to the split\r\n--> 483                 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n    484             except 
OSError:\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\builder.py in _prepare_split(self, split_generator)\r\n    663         generator = self._generate_examples(**split_generator.gen_kwargs)\r\n--> 664         for key, record in utils.tqdm(generator, unit=\" examples\", total=split_info.num_examples, leave=False):\r\n    665             example = self.info.features.encode_example(record)\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\tqdm\\notebook.py in __iter__(self, *args, **kwargs)\r\n    217         try:\r\n--> 218             for obj in super(tqdm_notebook, self).__iter__(*args, **kwargs):\r\n    219                 # return super(tqdm...) will not catch exception\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\tqdm\\std.py in __iter__(self)\r\n   1128         try:\r\n-> 1129             for obj in iterable:\r\n   1130                 yield obj\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\datasets\\glue\\7fc58099eb3983a04c8dac8500b70d27e6eceae63ffb40d7900c977897bb58c6\\glue.py in _generate_examples(self, data_file, split, mrpc_files)\r\n    514             examples = self._generate_example_mrpc_files(mrpc_files=mrpc_files, split=split)\r\n--> 515             for example in examples:\r\n    516                 yield example[\"idx\"], example\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\datasets\\glue\\7fc58099eb3983a04c8dac8500b70d27e6eceae63ffb40d7900c977897bb58c6\\glue.py in _generate_example_mrpc_files(self, mrpc_files, split)\r\n    576                 reader = csv.DictReader(f, delimiter=\"\\t\", quoting=csv.QUOTE_NONE)\r\n--> 577                 for n, row in enumerate(reader):\r\n    578                     is_row_in_dev = [row[\"#1 ID\"], row[\"#2 ID\"]] in dev_ids\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\csv.py in __next__(self)\r\n    110             self.fieldnames\r\n--> 111         row = next(self.reader)\r\n    112         self.line_num = self.reader.line_num\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\encodings\\cp1252.py in decode(self, input, final)\r\n     22     def decode(self, input, final=False):\r\n---> 23         return codecs.charmap_decode(input,self.errors,decoding_table)[0]\r\n     24 \r\n\r\nUnicodeDecodeError: 'charmap' codec can't decode byte 0x9d in position 1180: character maps to \r\n```\r\nThe fix is the same: specify `utf-8` encoding when opening the file. The previous fix didn't work as MRPC's download process is different from the others in GLUE. \r\nI am going to propose a new PR :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/307\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/307\/timeline","performed_via_github_app":null}
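A minimal illustration of the fix described in the issue above (not the exact patch that was merged): opening the MRPC TSV with an explicit `utf-8` encoding so Windows doesn't fall back to cp1252. The file path is a placeholder.

```python
import csv

mrpc_file = "msr_paraphrase_train.txt"  # placeholder path to one MRPC split

with open(mrpc_file, encoding="utf-8") as f:
    reader = csv.DictReader(f, delimiter="\t", quoting=csv.QUOTE_NONE)
    for n, row in enumerate(reader):
        pass  # same iteration that raised UnicodeDecodeError under cp1252
```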
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/306","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/306\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/306\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/306\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/306","id":644176078,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM4ODQ2MTI3","number":306,"title":"add pg19 dataset","user":{"login":"lucidrains","id":108653,"node_id":"MDQ6VXNlcjEwODY1Mw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/108653?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucidrains","html_url":"https:\/\/github.com\/lucidrains","followers_url":"https:\/\/api.github.com\/users\/lucidrains\/followers","following_url":"https:\/\/api.github.com\/users\/lucidrains\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucidrains\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucidrains\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucidrains\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucidrains\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucidrains\/repos","events_url":"https:\/\/api.github.com\/users\/lucidrains\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucidrains\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2020-06-23T22:03:52Z","updated_at":"2020-07-06T07:55:59Z","closed_at":"2020-07-06T07:55:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/306","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/306","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/306.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/306.patch","merged_at":"2020-07-06T07:55:59Z"},"body":"https:\/\/github.com\/huggingface\/nlp\/issues\/274\r\n\r\nAdd functioning PG19 dataset with dummy data\r\n\r\n`cos_e.py` was just auto-linted by `make style`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/306\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/306\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/305","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/305\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/305\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/305\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/305","id":644148149,"node_id":"MDU6SXNzdWU2NDQxNDgxNDk=","number":305,"title":"Importing downloaded package repository fails","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067393914,"node_id":"MDU6TGFiZWwyMDY3MzkzOTE0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20bug","name":"metric bug","color":"25b21e","default":false,"description":"A bug in a metric script"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-23T21:09:05Z","updated_at":"2020-07-30T16:44:23Z","closed_at":"2020-07-30T16:44:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The `get_imports` function in `src\/nlp\/load.py` has a feature to download a package as a zip archive of the github repository and import functions from the unpacked directory. This is used for example in the `metrics\/coval.py` file, and would be useful to add BLEURT (@ankparikh).\r\n\r\nCurrently however, the code seems to have trouble with imports within the package. 
For example:\r\n```\r\nimport nlp\r\ncoval = nlp.load_metric('coval')\r\n```\r\nyields:\r\n```\r\nTraceback (most recent call last):\r\n  File \"\", line 1, in \r\n  File \"\/home\/yacine\/Code\/nlp\/src\/nlp\/load.py\", line 432, in load_metric\r\n    metric_cls = import_main_class(module_path, dataset=False)\r\n  File \"\/home\/yacine\/Code\/nlp\/src\/nlp\/load.py\", line 57, in import_main_class\r\n    module = importlib.import_module(module_path)\r\n  File \"\/home\/yacine\/anaconda3\/lib\/python3.7\/importlib\/__init__.py\", line 127, in import_module\r\n    return _bootstrap._gcd_import(name[level:], package, level)\r\n  File \"\", line 1006, in _gcd_import\r\n  File \"\", line 983, in _find_and_load\r\n  File \"\", line 967, in _find_and_load_unlocked\r\n  File \"\", line 677, in _load_unlocked\r\n  File \"\", line 728, in exec_module\r\n  File \"\", line 219, in _call_with_frames_removed\r\n  File \"\/home\/yacine\/Code\/nlp\/src\/nlp\/metrics\/coval\/a78807df33ac45edbb71799caf2b3b47e55df4fd690267808fe963a5e8b30952\/coval.py\", line 21, in \r\n    from .coval_backend.conll import reader  # From: https:\/\/github.com\/ns-moosavi\/coval\r\n  File \"\/home\/yacine\/Code\/nlp\/src\/nlp\/metrics\/coval\/a78807df33ac45edbb71799caf2b3b47e55df4fd690267808fe963a5e8b30952\/coval_backend\/conll\/reader.py\", line 2, in \r\n    from conll import mention\r\nModuleNotFoundError: No module named 'conll'\r\n```\r\n\r\nNot sure what the fix would be there.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/305\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":1,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/305\/timeline","performed_via_github_app":null}
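The issue above leaves the fix open; purely as an assumption, one possible workaround would be to put the unpacked repository directory on `sys.path` before importing the metric module, so absolute imports such as `from conll import mention` resolve. A sketch of that idea, not the solution that was adopted:

```python
import importlib
import sys

def import_with_repo_on_path(repo_dir, module_path):
    """Hypothetical helper: make the unpacked GitHub zip importable as top-level
    packages (e.g. `repo_dir/conll/`), then import the requested module."""
    if repo_dir not in sys.path:
        sys.path.insert(0, repo_dir)
    return importlib.import_module(module_path)
```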
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/304","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/304\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/304\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/304\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/304","id":644091970,"node_id":"MDU6SXNzdWU2NDQwOTE5NzA=","number":304,"title":"Problem while printing doc string when instantiating multiple metrics.","user":{"login":"codehunk628","id":51091425,"node_id":"MDQ6VXNlcjUxMDkxNDI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/51091425?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/codehunk628","html_url":"https:\/\/github.com\/codehunk628","followers_url":"https:\/\/api.github.com\/users\/codehunk628\/followers","following_url":"https:\/\/api.github.com\/users\/codehunk628\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/codehunk628\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/codehunk628\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/codehunk628\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/codehunk628\/orgs","repos_url":"https:\/\/api.github.com\/users\/codehunk628\/repos","events_url":"https:\/\/api.github.com\/users\/codehunk628\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/codehunk628\/received_events","type":"User","site_admin":false},"labels":[{"id":2067393914,"node_id":"MDU6TGFiZWwyMDY3MzkzOTE0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20bug","name":"metric bug","color":"25b21e","default":false,"description":"A bug in a metric script"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-23T19:32:05Z","updated_at":"2020-07-22T09:50:58Z","closed_at":"2020-07-22T09:50:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I load more than one metric and try to print doc string of a particular metric,. It shows the doc strings of all imported metric one after the other which looks quite confusing and clumsy.\r\nAttached [Colab](https:\/\/colab.research.google.com\/drive\/13H0ZgyQ2se0mqJ2yyew0bNEgJuHaJ8H3?usp=sharing) Notebook for problem clarification..","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/304\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/304\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/303","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/303\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/303\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/303\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/303","id":643912464,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM4NjI3Nzcw","number":303,"title":"allow to move files across file systems","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-23T14:56:08Z","updated_at":"2020-06-23T15:08:44Z","closed_at":"2020-06-23T15:08:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/303","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/303","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/303.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/303.patch","merged_at":"2020-06-23T15:08:43Z"},"body":"Users are allowed to use the `cache_dir` that they want.\r\nTherefore it can happen that we try to move files across filesystems.\r\nWe were using `os.rename` that doesn't allow that, so I changed some of them to `shutil.move`.\r\n\r\nThis should fix #301","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/303\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/303\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/302","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/302\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/302\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/302\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/302","id":643910418,"node_id":"MDU6SXNzdWU2NDM5MTA0MTg=","number":302,"title":"Question - Sign Language Datasets","user":{"login":"AmitMY","id":5757359,"node_id":"MDQ6VXNlcjU3NTczNTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5757359?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AmitMY","html_url":"https:\/\/github.com\/AmitMY","followers_url":"https:\/\/api.github.com\/users\/AmitMY\/followers","following_url":"https:\/\/api.github.com\/users\/AmitMY\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AmitMY\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AmitMY\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AmitMY\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AmitMY\/orgs","repos_url":"https:\/\/api.github.com\/users\/AmitMY\/repos","events_url":"https:\/\/api.github.com\/users\/AmitMY\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AmitMY\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-06-23T14:53:40Z","updated_at":"2020-11-25T11:25:33Z","closed_at":"2020-11-25T11:25:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"An emerging field in NLP is SLP - sign language processing.\r\n\r\nI was wondering about adding datasets here, specifically because it's shaping up to be large and easily usable.\r\nThe metrics for sign language to text translation are the same.\r\n\r\nSo, what do you think about (me, or others) adding datasets here?\r\n\r\n\r\nAn example dataset would be [RWTH-PHOENIX-Weather 2014 T](https:\/\/www-i6.informatik.rwth-aachen.de\/~koller\/RWTH-PHOENIX-2014-T\/)\r\nFor every item in the dataset, the data object includes:\r\n1. video_path - path to mp4 file\r\n2. pose_path - a path to `.pose` file with human pose landmarks\r\n3. openpose_path - a path to a `.json` file with human pose landmarks\r\n4. gloss - string\r\n5. text - string\r\n6. video_metadata - height, width, frames, framerate\r\n\r\n\r\n------\r\n\r\nTo make it a tad more complicated - what if sign language libraries add requirements to `nlp`? 
For example, sign language is commonly annotated using `ilex`, `eaf`, or `srt` files, which are all loadable as text, but there is no reason for the dataset to parse those files itself if libraries exist to do so.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/302\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/302\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/301","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/301\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/301\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/301\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/301","id":643763525,"node_id":"MDU6SXNzdWU2NDM3NjM1MjU=","number":301,"title":"Setting cache_dir gives error on wikipedia download","user":{"login":"hallvagi","id":33862536,"node_id":"MDQ6VXNlcjMzODYyNTM2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33862536?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hallvagi","html_url":"https:\/\/github.com\/hallvagi","followers_url":"https:\/\/api.github.com\/users\/hallvagi\/followers","following_url":"https:\/\/api.github.com\/users\/hallvagi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hallvagi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hallvagi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hallvagi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hallvagi\/orgs","repos_url":"https:\/\/api.github.com\/users\/hallvagi\/repos","events_url":"https:\/\/api.github.com\/users\/hallvagi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hallvagi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-23T11:31:44Z","updated_at":"2020-06-24T07:05:07Z","closed_at":"2020-06-24T07:05:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"First of all thank you for a super handy library! I'd like to download large files to a specific drive so I set `cache_dir=my_path`. This works fine with e.g. imdb and squad. 
But on wikipedia I get an error:\r\n```\r\nnlp.load_dataset('wikipedia', '20200501.de', split = 'train', cache_dir=my_path)\r\n```\r\n```\r\nOSError                                   Traceback (most recent call last)\r\n in \r\n      1 import nlp\r\n----> 2 nlp.load_dataset('wikipedia', '20200501.de', split = 'train', cache_dir=path)\r\n\r\n~\/anaconda3\/envs\/fastai2\/lib\/python3.7\/site-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n    522         download_mode=download_mode,\r\n    523         ignore_verifications=ignore_verifications,\r\n--> 524         save_infos=save_infos,\r\n    525     )\r\n    526 \r\n\r\n~\/anaconda3\/envs\/fastai2\/lib\/python3.7\/site-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n    385                     with utils.temporary_assignment(self, \"_cache_dir\", tmp_data_dir):\r\n    386                         reader = ArrowReader(self._cache_dir, self.info)\r\n--> 387                         reader.download_from_hf_gcs(self._cache_dir, self._relative_data_dir(with_version=True))\r\n    388                         downloaded_info = DatasetInfo.from_directory(self._cache_dir)\r\n    389                         self.info.update(downloaded_info)\r\n\r\n~\/anaconda3\/envs\/fastai2\/lib\/python3.7\/site-packages\/nlp\/arrow_reader.py in download_from_hf_gcs(self, cache_dir, relative_data_dir)\r\n    231             remote_dataset_info = os.path.join(remote_cache_dir, \"dataset_info.json\")\r\n    232             downloaded_dataset_info = cached_path(remote_dataset_info)\r\n--> 233             os.rename(downloaded_dataset_info, os.path.join(cache_dir, \"dataset_info.json\"))\r\n    234             if self._info is not None:\r\n    235                 self._info.update(self._info.from_directory(cache_dir))\r\n\r\nOSError: [Errno 18] Invalid cross-device link: '\/home\/local\/NTU\/nn\/.cache\/huggingface\/datasets\/025fa4fd4f04aaafc9e939260fbc8f0bb190ce14c61310c8ae1ddd1dcb31f88c.9637f367b6711a79ca478be55fe6989b8aea4941b7ef7adc67b89ff403020947' -> '\/data\/nn\/nlp\/wikipedia\/20200501.de\/1.0.0.incomplete\/dataset_info.json'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/301\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/301\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/300","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/300\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/300\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/300\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/300","id":643688304,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM4NDQ4Mjk1","number":300,"title":"Fix bertscore references","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-23T09:38:59Z","updated_at":"2020-06-23T14:47:38Z","closed_at":"2020-06-23T14:47:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/300","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/300","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/300.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/300.patch","merged_at":"2020-06-23T14:47:36Z"},"body":"I added some type checking for metrics. There was an issue where a metric could interpret a string a a list. A `ValueError` is raised if a string is given instead of a list.\r\n\r\nMoreover I added support for both strings and lists of strings for `references` in `bertscore`, as it is the case in the original code.\r\n\r\nBoth ways work:\r\n```\r\nimport nlp\r\n\r\nscorer = nlp.load_metric(\"bertscore\")\r\nwith open(\"pred.txt\") as p, open(\"ref.txt\") as g:\r\n    for lp, lg in zip(p, g):\r\n        scorer.add(lp, [lg])\r\nscore = scorer.compute(lang=\"en\")\r\n```\r\n\r\n```\r\nimport nlp\r\n\r\nscorer = nlp.load_metric(\"bertscore\")\r\nwith open(\"pred.txt\") as p, open(\"ref.txt\") as g:\r\n    for lp, lg in zip(p, g):\r\n        scorer.add(lp, lg)\r\nscore = scorer.compute(lang=\"en\")\r\n```\r\n\r\nThis should fix #295 and #238 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/300\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/300\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/299","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/299\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/299\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/299\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/299","id":643611557,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM4Mzg0NDgw","number":299,"title":"remove some print in snli file","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-23T07:46:06Z","updated_at":"2020-06-23T08:10:46Z","closed_at":"2020-06-23T08:10:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/299","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/299","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/299.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/299.patch","merged_at":"2020-06-23T08:10:44Z"},"body":"This PR removes unwanted  `print` statements in some files such as `snli.py`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/299\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/299\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/298","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/298\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/298\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/298\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/298","id":643603804,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM4Mzc4MDM4","number":298,"title":"Add searchable datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2020-06-23T07:33:03Z","updated_at":"2020-06-26T07:50:44Z","closed_at":"2020-06-26T07:50:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/298","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/298","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/298.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/298.patch","merged_at":"2020-06-26T07:50:43Z"},"body":"# Better support for Numpy format + Add Indexed Datasets\r\n\r\nI was working on adding Indexed Datasets but in the meantime I had to also add more support for Numpy arrays in the lib.\r\n\r\n## Better support for Numpy format\r\n\r\nNew features:\r\n- New fast method to convert Numpy arrays from Arrow structure (up to x100 speed up) using Pandas.\r\n- Allow to output Numpy arrays in batched `.map`, which was the only missing part to fully support Numpy arrays.\r\n\r\nPandas offers fast zero-copy Numpy arrays conversion from Arrow structures.\r\nUsing it we can speed up the reading of memory-mapped Numpy array stored in Arrow format.\r\n\r\nWith these changes you can easily compute embeddings of texts using `.map()`. 
For example:\r\n```python\r\ndef embed(text):\r\n    tokenized_example = tokenizer.encode(text, return_tensors=\"pt\")\r\n    embeddings = bert_encoder(tokenized_example).numpy()\r\n    return embeddings\r\ndset_with_embeddings = dset.map(lambda example: {\"embeddings\": embed(example[\"text\"])})\r\n```\r\nAnd then reading the embeddings from the arrow format is very fast.\r\n\r\nPS1: Note that right now only 1d arrays are supported.\r\nPS2: It seems possible to do this without pandas, but it would require more _trickery_.\r\nPS3: I did a simple benchmark with google colab that you can view here:\r\nhttps:\/\/colab.research.google.com\/drive\/1QlLTR6LRwYOKGJ-hTHmHyolE3wJzvfFg?usp=sharing\r\n\r\n## Add Indexed Datasets\r\n\r\nFor many retrieval tasks it is convenient to index a dataset to be able to run fast queries.\r\nFor example, for Open Domain QA models like DPR, REALM, RAG etc., the retrieval step is very important.\r\n\r\nTherefore I added two ways to add an index to a column of a dataset:\r\n1) You can index it using a Dense Index like Faiss. It is used to index vectors.\r\n    Faiss is a library for efficient similarity search and clustering of dense vectors.\r\n    It contains algorithms that search in sets of vectors of any size, up to ones that possibly do not fit in RAM.\r\n2) You can index it using a Sparse Index like Elasticsearch. It is used to index text and run queries based on BM25 similarity.\r\n\r\nExample of usage:\r\n\r\n```python\r\nds = nlp.load_dataset('crime_and_punish', split='train')\r\nds_with_embeddings = ds.map(lambda example: {'embeddings': embed(example['line'])})  # `embed` outputs a `np.array`\r\nds_with_embeddings.add_vector_index(column='embeddings')\r\nscores, retrieved_examples = ds_with_embeddings.get_nearest(column='embeddings', query=embed('my new query'), k=10)\r\n```\r\n\r\n```python\r\nds = nlp.load_dataset('crime_and_punish', split='train')\r\nes_client = elasticsearch.Elasticsearch()\r\nds.add_text_index(column='line', es_client=es_client, index_name=\"my_es_index\")\r\nscores, retrieved_examples = ds.get_nearest(column='line', query='my new query', k=10)\r\n```\r\n\r\nPS4: Faiss allows specifying many options for the [index](https:\/\/github.com\/facebookresearch\/faiss\/wiki\/The-index-factory) and for [GPU settings](https:\/\/github.com\/facebookresearch\/faiss\/wiki\/Faiss-on-the-GPU). I made sure that the user has full control over those settings.\r\n\r\n## Tests\r\n\r\nI added tests for Faiss, Elasticsearch and indexed datasets.\r\nI had to edit the CI config because all the test scripts were not being run by CircleCI.\r\n\r\n------------------\r\n\r\nI'd be really happy to have some feedback :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/298\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/298\/timeline","performed_via_github_app":null}
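As a rough, standalone illustration of what the dense index in the PR above relies on, here is a minimal Faiss sketch (assuming `faiss` and `numpy` are installed; the dimensions, index type, and random data are illustrative only, not the library's internals):

```python
import numpy as np
import faiss

dim = 768                                              # embedding size (illustrative)
vectors = np.random.rand(1000, dim).astype("float32")  # stand-in for text embeddings

index = faiss.IndexFlatL2(dim)   # exact L2 search; Faiss offers many other index types
index.add(vectors)               # add the dataset vectors to the index

query = np.random.rand(1, dim).astype("float32")
distances, ids = index.search(query, 10)   # ids of the 10 nearest stored vectors
print(ids[0])
```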
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/297","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/297\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/297\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/297\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/297","id":643444625,"node_id":"MDU6SXNzdWU2NDM0NDQ2MjU=","number":297,"title":"Error in Demo for Specific Datasets","user":{"login":"s-jse","id":60150701,"node_id":"MDQ6VXNlcjYwMTUwNzAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/60150701?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/s-jse","html_url":"https:\/\/github.com\/s-jse","followers_url":"https:\/\/api.github.com\/users\/s-jse\/followers","following_url":"https:\/\/api.github.com\/users\/s-jse\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/s-jse\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/s-jse\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/s-jse\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/s-jse\/orgs","repos_url":"https:\/\/api.github.com\/users\/s-jse\/repos","events_url":"https:\/\/api.github.com\/users\/s-jse\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/s-jse\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-06-23T00:38:42Z","updated_at":"2020-07-17T17:43:06Z","closed_at":"2020-07-17T17:43:06Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Selecting `natural_questions` or `newsroom` dataset in the online demo results in an error similar to the following.\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/60150701\/85347842-ac861900-b4ae-11ea-98c4-a53a00934783.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/297\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/297\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/296","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/296\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/296\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/296\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/296","id":643423717,"node_id":"MDU6SXNzdWU2NDM0MjM3MTc=","number":296,"title":"snli -1 labels","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-06-22T23:33:30Z","updated_at":"2020-06-23T14:41:59Z","closed_at":"2020-06-23T14:41:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to train a model on the SNLI dataset. Why does it have so many -1 labels?\r\n```\r\nimport nlp\r\nfrom collections import Counter\r\ndata = nlp.load_dataset('snli')['train']\r\nprint(Counter(data['label']))\r\nCounter({0: 183416, 2: 183187, 1: 182764, -1: 785})\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/296\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/296\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/295","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/295\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/295\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/295\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/295","id":643245412,"node_id":"MDU6SXNzdWU2NDMyNDU0MTI=","number":295,"title":"Improve input warning for evaluation metrics","user":{"login":"Tiiiger","id":19514537,"node_id":"MDQ6VXNlcjE5NTE0NTM3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19514537?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Tiiiger","html_url":"https:\/\/github.com\/Tiiiger","followers_url":"https:\/\/api.github.com\/users\/Tiiiger\/followers","following_url":"https:\/\/api.github.com\/users\/Tiiiger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Tiiiger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Tiiiger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Tiiiger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Tiiiger\/orgs","repos_url":"https:\/\/api.github.com\/users\/Tiiiger\/repos","events_url":"https:\/\/api.github.com\/users\/Tiiiger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Tiiiger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-22T17:28:57Z","updated_at":"2020-06-23T14:47:37Z","closed_at":"2020-06-23T14:47:37Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, \r\n\r\nI am the author of `bert_score`. Recently, we received [ an issue ](https:\/\/github.com\/Tiiiger\/bert_score\/issues\/62) reporting a problem in using `bert_score` from the `nlp` package (also see #238 in this repo).  After looking into this, I realized that the problem arises from the format `nlp.Metric` takes input. \r\n\r\nHere is a minimal example:\r\n```python\r\nimport nlp\r\n\r\nscorer = nlp.load_metric(\"bertscore\")\r\nwith open(\"pred.txt\") as p, open(\"ref.txt\") as g:\r\n    for lp, lg in zip(p, g):\r\n        scorer.add(lp, lg)\r\nscore = scorer.compute(lang=\"en\")\r\n```\r\n\r\nThe problem in the above code is that `scorer.add()` expects a list of strings as input for the references. As a result, the `scorer` here would take a list of characters in `lg` to be the references. The correct implementation would be calling\r\n```python\r\nscorer.add(lp, [lg])\r\n```\r\n\r\nI just want to raise this issue to you to prevent future user errors of a similar kind. I assume some simple type checking can prevent this from happening?\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/295\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/295\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/294","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/294\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/294\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/294\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/294","id":643181179,"node_id":"MDU6SXNzdWU2NDMxODExNzk=","number":294,"title":"Cannot load arxiv dataset on MacOS?","user":{"login":"JohnGiorgi","id":8917831,"node_id":"MDQ6VXNlcjg5MTc4MzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8917831?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JohnGiorgi","html_url":"https:\/\/github.com\/JohnGiorgi","followers_url":"https:\/\/api.github.com\/users\/JohnGiorgi\/followers","following_url":"https:\/\/api.github.com\/users\/JohnGiorgi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JohnGiorgi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JohnGiorgi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JohnGiorgi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JohnGiorgi\/orgs","repos_url":"https:\/\/api.github.com\/users\/JohnGiorgi\/repos","events_url":"https:\/\/api.github.com\/users\/JohnGiorgi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JohnGiorgi\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-06-22T15:46:55Z","updated_at":"2020-06-30T15:25:10Z","closed_at":"2020-06-30T15:25:10Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am having trouble loading the `\"arxiv\"` config from the `\"scientific_papers\"` dataset on MacOS. 
When I try loading the dataset with:\r\n\r\n```python\r\narxiv = nlp.load_dataset(\"scientific_papers\", \"arxiv\")\r\n```\r\n\r\nI get the following stack trace:\r\n\r\n```bash\r\nJSONDecodeError                           Traceback (most recent call last)\r\n in \r\n----> 1 arxiv = nlp.load_dataset(\"scientific_papers\", \"arxiv\")\r\n\r\n~\/miniconda3\/envs\/t2t\/lib\/python3.7\/site-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n    522         download_mode=download_mode,\r\n    523         ignore_verifications=ignore_verifications,\r\n--> 524         save_infos=save_infos,\r\n    525     )\r\n    526 \r\n\r\n~\/miniconda3\/envs\/t2t\/lib\/python3.7\/site-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n    430                 verify_infos = not save_infos and not ignore_verifications\r\n    431                 self._download_and_prepare(\r\n--> 432                     dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n    433                 )\r\n    434                 # Sync info\r\n\r\n~\/miniconda3\/envs\/t2t\/lib\/python3.7\/site-packages\/nlp\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n    481             try:\r\n    482                 # Prepare split will record examples associated to the split\r\n--> 483                 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n    484             except OSError:\r\n    485                 raise OSError(\"Cannot find data file. 
\" + (self.manual_download_instructions or \"\"))\r\n\r\n~\/miniconda3\/envs\/t2t\/lib\/python3.7\/site-packages\/nlp\/builder.py in _prepare_split(self, split_generator)\r\n    662 \r\n    663         generator = self._generate_examples(**split_generator.gen_kwargs)\r\n--> 664         for key, record in utils.tqdm(generator, unit=\" examples\", total=split_info.num_examples, leave=False):\r\n    665             example = self.info.features.encode_example(record)\r\n    666             writer.write(example)\r\n\r\n~\/miniconda3\/envs\/t2t\/lib\/python3.7\/site-packages\/tqdm\/std.py in __iter__(self)\r\n   1106                 fp_write=getattr(self.fp, 'write', sys.stderr.write))\r\n   1107 \r\n-> 1108         for obj in iterable:\r\n   1109             yield obj\r\n   1110             # Update and possibly print the progressbar.\r\n\r\n~\/miniconda3\/envs\/t2t\/lib\/python3.7\/site-packages\/nlp\/datasets\/scientific_papers\/107a416c0e1958cb846f5934b5aae292f7884a5b27e86af3f3ef1a093e058bbc\/scientific_papers.py in _generate_examples(self, path)\r\n    114                 # \"section_names\": list[str], list of section names.\r\n    115                 # \"sections\": list[list[str]], list of sections (list of paragraphs)\r\n--> 116                 d = json.loads(line)\r\n    117                 summary = \"\\n\".join(d[\"abstract_text\"])\r\n    118                 # In original paper,  and <\/S> are not used in vocab during training\r\n\r\n~\/miniconda3\/envs\/t2t\/lib\/python3.7\/json\/__init__.py in loads(s, encoding, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)\r\n    346             parse_int is None and parse_float is None and\r\n    347             parse_constant is None and object_pairs_hook is None and not kw):\r\n--> 348         return _default_decoder.decode(s)\r\n    349     if cls is None:\r\n    350         cls = JSONDecoder\r\n\r\n~\/miniconda3\/envs\/t2t\/lib\/python3.7\/json\/decoder.py in decode(self, s, _w)\r\n    335 \r\n    336         \"\"\"\r\n--> 337         obj, end = self.raw_decode(s, idx=_w(s, 0).end())\r\n    338         end = _w(s, end).end()\r\n    339         if end != len(s):\r\n\r\n~\/miniconda3\/envs\/t2t\/lib\/python3.7\/json\/decoder.py in raw_decode(self, s, idx)\r\n    351         \"\"\"\r\n    352         try:\r\n--> 353             obj, end = self.scan_once(s, idx)\r\n    354         except StopIteration as err:\r\n    355             raise JSONDecodeError(\"Expecting value\", s, err.value) from None\r\n\r\nJSONDecodeError: Unterminated string starting at: line 1 column 46983 (char 46982)\r\n\r\n163502 examples [02:10, 2710.68 examples\/s]   \r\n```\r\n\r\nI am not sure how to trace back to the specific JSON file that has the \"Unterminated string\". Also, I do not get this error on colab so I suspect it may be MacOS specific. Copy pasting the relevant lines from `transformers-cli env` below:\r\n\r\n- Platform: Darwin-19.5.0-x86_64-i386-64bit\r\n- Python version: 3.7.5\r\n- PyTorch version (GPU?): 1.5.0 (False)\r\n- Tensorflow version (GPU?): 2.2.0 (False)\r\n\r\nAny ideas?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/294\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/294\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/293","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/293\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/293\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/293\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/293","id":642942182,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM3ODM1ODI4","number":293,"title":"Don't test community datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-22T10:15:33Z","updated_at":"2020-06-22T11:07:00Z","closed_at":"2020-06-22T11:06:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/293","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/293","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/293.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/293.patch","merged_at":"2020-06-22T11:06:59Z"},"body":"This PR disables testing for community datasets on aws.\r\n\r\nIt should fix the CI that is currently failing.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/293\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/293\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/292","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/292\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/292\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/292\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/292","id":642897797,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM3Nzk4NTM2","number":292,"title":"Update metadata for x_stance dataset","user":{"login":"jvamvas","id":5830820,"node_id":"MDQ6VXNlcjU4MzA4MjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5830820?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jvamvas","html_url":"https:\/\/github.com\/jvamvas","followers_url":"https:\/\/api.github.com\/users\/jvamvas\/followers","following_url":"https:\/\/api.github.com\/users\/jvamvas\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jvamvas\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jvamvas\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jvamvas\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jvamvas\/orgs","repos_url":"https:\/\/api.github.com\/users\/jvamvas\/repos","events_url":"https:\/\/api.github.com\/users\/jvamvas\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jvamvas\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-06-22T09:13:26Z","updated_at":"2020-06-23T08:07:24Z","closed_at":"2020-06-23T08:07:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/292","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/292","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/292.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/292.patch","merged_at":"2020-06-23T08:07:24Z"},"body":"Thank you for featuring the x_stance dataset in your library. This PR updates some metadata:\r\n- Citation: Replace preprint with proceedings\r\n- URL: Use a URL with long-term availability\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/292\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/292\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/291","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/291\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/291\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/291\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/291","id":642688450,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM3NjM1NjMy","number":291,"title":"break statement not required","user":{"login":"mayurnewase","id":12967587,"node_id":"MDQ6VXNlcjEyOTY3NTg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12967587?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mayurnewase","html_url":"https:\/\/github.com\/mayurnewase","followers_url":"https:\/\/api.github.com\/users\/mayurnewase\/followers","following_url":"https:\/\/api.github.com\/users\/mayurnewase\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mayurnewase\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mayurnewase\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mayurnewase\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mayurnewase\/orgs","repos_url":"https:\/\/api.github.com\/users\/mayurnewase\/repos","events_url":"https:\/\/api.github.com\/users\/mayurnewase\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mayurnewase\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-06-22T01:40:55Z","updated_at":"2020-06-23T17:57:58Z","closed_at":"2020-06-23T09:37:02Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/291","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/291","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/291.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/291.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/291\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/291\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/290","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/290\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/290\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/290\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/290","id":641978286,"node_id":"MDU6SXNzdWU2NDE5NzgyODY=","number":290,"title":"ConnectionError - Eli5 dataset download","user":{"login":"JovanNj","id":8490096,"node_id":"MDQ6VXNlcjg0OTAwOTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8490096?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JovanNj","html_url":"https:\/\/github.com\/JovanNj","followers_url":"https:\/\/api.github.com\/users\/JovanNj\/followers","following_url":"https:\/\/api.github.com\/users\/JovanNj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JovanNj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JovanNj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JovanNj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JovanNj\/orgs","repos_url":"https:\/\/api.github.com\/users\/JovanNj\/repos","events_url":"https:\/\/api.github.com\/users\/JovanNj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JovanNj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-19T13:40:33Z","updated_at":"2020-06-20T13:22:24Z","closed_at":"2020-06-20T13:22:24Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I have a problem with downloading Eli5 dataset. When typing `nlp.load_dataset('eli5')`, I get ConnectionError: Couldn't reach https:\/\/storage.googleapis.com\/huggingface-nlp\/cache\/datasets\/eli5\/LFQA_reddit\/1.0.0\/explain_like_im_five-train_eli5.arrow\r\n\r\nI would appreciate if you could help me with this issue.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/290\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/290\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/289","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/289\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/289\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/289\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/289","id":641934194,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM3MDc0MTM3","number":289,"title":"update xsum","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-06-19T12:28:32Z","updated_at":"2020-06-22T13:27:26Z","closed_at":"2020-06-22T07:20:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/289","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/289","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/289.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/289.patch","merged_at":"2020-06-22T07:20:07Z"},"body":"This PR makes the following update to the xsum dataset:\r\n\r\n- Manual download is not required anymore\r\n\r\n- dataset can be loaded as follow: `nlp.load_dataset('xsum')`\r\n\r\n\r\n**Important** \r\nInstead of using on outdated url to download the data:  \"https:\/\/raw.githubusercontent.com\/EdinburghNLP\/XSum\/master\/XSum-Dataset\/XSum-TRAINING-DEV-TEST-SPLIT-90-5-5.json\" \r\n\r\na more up-to-date url stored here: https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/summarization\/xsum.tar.gz is used\r\n, so that the user does not need to manually download the data anymore. \r\nThere might be slight breaking changes here for xsum. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/289\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/289\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/288","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/288\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/288\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/288\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/288","id":641888610,"node_id":"MDU6SXNzdWU2NDE4ODg2MTA=","number":288,"title":"Error at the first example in README: AttributeError: module 'dill' has no attribute '_dill'","user":{"login":"wutong8023","id":14964542,"node_id":"MDQ6VXNlcjE0OTY0NTQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14964542?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/wutong8023","html_url":"https:\/\/github.com\/wutong8023","followers_url":"https:\/\/api.github.com\/users\/wutong8023\/followers","following_url":"https:\/\/api.github.com\/users\/wutong8023\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/wutong8023\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/wutong8023\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/wutong8023\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/wutong8023\/orgs","repos_url":"https:\/\/api.github.com\/users\/wutong8023\/repos","events_url":"https:\/\/api.github.com\/users\/wutong8023\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/wutong8023\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-06-19T11:01:22Z","updated_at":"2020-06-21T09:05:11Z","closed_at":"2020-06-21T09:05:11Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/tensorflow\/python\/framework\/dtypes.py:469: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) \/ '(1,)type'.\r\n  _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\r\n\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/tensorflow\/python\/framework\/dtypes.py:470: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) \/ '(1,)type'.\r\n  _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\r\n\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/tensorflow\/python\/framework\/dtypes.py:471: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) \/ '(1,)type'.\r\n  _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\r\n\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/tensorflow\/python\/framework\/dtypes.py:472: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) \/ '(1,)type'.\r\n  _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\r\n\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/tensorflow\/python\/framework\/dtypes.py:473: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be 
understood as (type, (1,)) \/ '(1,)type'.\r\n  _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\r\n\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/tensorflow\/python\/framework\/dtypes.py:476: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) \/ '(1,)type'.\r\n  np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\r\n\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/importlib\/_bootstrap.py:219: RuntimeWarning: compiletime version 3.5 of module 'tensorflow.python.framework.fast_tensor_util' does not match runtime version 3.6\r\n  return f(*args, **kwds)\r\n\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/h5py\/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\r\n  from ._conv import register_converters as _register_converters\r\nTraceback (most recent call last):\r\n  File \"\/Users\/parasol_tree\/Resource\/019 - Github\/AcademicEnglishToolkit \/test.py\", line 7, in \r\n    import nlp\r\n  File \"\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/nlp\/__init__.py\", line 27, in \r\n    from .arrow_dataset import Dataset\r\n  File \"\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/nlp\/arrow_dataset.py\", line 31, in \r\n    from nlp.utils.py_utils import dumps\r\n  File \"\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/nlp\/utils\/__init__.py\", line 20, in \r\n    from .download_manager import DownloadManager, GenerateMode\r\n  File \"\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/nlp\/utils\/download_manager.py\", line 25, in \r\n    from .py_utils import flatten_nested, map_nested, size_str\r\n  File \"\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/nlp\/utils\/py_utils.py\", line 244, in \r\n    class Pickler(dill.Pickler):\r\n  File \"\/Users\/parasol_tree\/anaconda3\/lib\/python3.6\/site-packages\/nlp\/utils\/py_utils.py\", line 247, in Pickler\r\n    dispatch = dill._dill.MetaCatchingDict(dill.Pickler.dispatch.copy())\r\nAttributeError: module 'dill' has no attribute '_dill'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/288\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/288\/timeline","performed_via_github_app":null}
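The traceback ends at `dill._dill`, an attribute that only exists in sufficiently recent dill releases (older ones expose the module under a different name), so checking and upgrading the installed dill is a reasonable first step; a small sketch, with the upgrade command shown as an assumption:

```python
import dill

print(dill.__version__)  # nlp's Pickler expects dill._dill to exist
# If the attribute is missing, upgrading dill usually resolves it (assumption):
#   pip install --upgrade dill
```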
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/287","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/287\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/287\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/287\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/287","id":641800227,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM2OTY0NTg0","number":287,"title":"fix squad_v2 metric","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-19T08:24:46Z","updated_at":"2020-06-19T08:33:43Z","closed_at":"2020-06-19T08:33:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/287","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/287","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/287.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/287.patch","merged_at":"2020-06-19T08:33:41Z"},"body":"Fix #280 \r\nThe imports were wrong","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/287\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/287\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/286","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/286\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/286\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/286\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/286","id":641585758,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM2NzkzMjI4","number":286,"title":"Add ANLI dataset.","user":{"login":"easonnie","id":11016329,"node_id":"MDQ6VXNlcjExMDE2MzI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11016329?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/easonnie","html_url":"https:\/\/github.com\/easonnie","followers_url":"https:\/\/api.github.com\/users\/easonnie\/followers","following_url":"https:\/\/api.github.com\/users\/easonnie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/easonnie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/easonnie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/easonnie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/easonnie\/orgs","repos_url":"https:\/\/api.github.com\/users\/easonnie\/repos","events_url":"https:\/\/api.github.com\/users\/easonnie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/easonnie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-18T22:27:30Z","updated_at":"2020-06-22T12:23:27Z","closed_at":"2020-06-22T12:23:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/286","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/286","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/286.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/286.patch","merged_at":"2020-06-22T12:23:26Z"},"body":"I completed all the steps in https:\/\/github.com\/huggingface\/nlp\/blob\/master\/CONTRIBUTING.md#how-to-add-a-dataset and push the code for ANLI. Please let me know if there are any errors.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/286\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/286\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/285","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/285\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/285\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/285\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/285","id":641360702,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM2NjAyMjk4","number":285,"title":"Consistent formatting of citations","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-18T16:25:23Z","updated_at":"2020-06-22T08:09:25Z","closed_at":"2020-06-22T08:09:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/285","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/285","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/285.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/285.patch","merged_at":"2020-06-22T08:09:23Z"},"body":"#283 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/285\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/285\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/284","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/284\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/284\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/284\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/284","id":641337217,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM2NTgxODQ2","number":284,"title":"Fix manual download instructions","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-06-18T15:59:57Z","updated_at":"2020-06-19T08:24:21Z","closed_at":"2020-06-19T08:24:19Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/284","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/284","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/284.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/284.patch","merged_at":"2020-06-19T08:24:19Z"},"body":"This PR replaces the static `DatasetBulider` variable `MANUAL_DOWNLOAD_INSTRUCTIONS` by a property function `manual_download_instructions()`. 
\r\n\r\nSome datasets like XTREME and all WMT need the manual data dir only for a small fraction of the possible configs.\r\n\r\nAfter some brainstorming with @mariamabarham and @lhoestq, we came to the conclusion that having a property function `manual_download_instructions()` gives us more flexibility to decide on a per-config basis in the dataset builder whether manual download instructions are needed.\r\n\r\nAlso, this PR should fix a bug with `wmt16 - ro-en`.\r\n@sshleifer, from this branch you should be able to successfully run\r\n\r\n```python \r\nimport nlp \r\nds = nlp.load_dataset('.\/datasets\/wmt16', 'ro-en')\r\n```\r\n\r\nand once this PR is merged S3 should be synced so that \r\n\r\n```python\r\nimport nlp\r\nds = nlp.load_dataset(\"wmt16\", \"ro-en\")\r\n```\r\n\r\nworks as well.\r\n\r\n**Important**: Since `MANUAL_DOWNLOAD_INSTRUCTIONS` was not really exposed to the user, this PR should not cause any backward compatibility problems.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/284\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/284\/timeline","performed_via_github_app":null}
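As an illustration of the property-based approach described in this PR, a builder could decide per config whether manual data is needed; the class and config names below are hypothetical, and the other required builder methods are omitted:

```python
import nlp

class MyTranslationDataset(nlp.GeneratorBasedBuilder):  # hypothetical builder; _info/_split_generators omitted
    @property
    def manual_download_instructions(self):
        # Only some configs need a manually downloaded data dir (assumption for illustration).
        if self.config.name == "xx-yy":
            return "Please download the xx-yy archive manually and pass data_dir=<path> to load_dataset."
        return None  # other configs are downloaded automatically
```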
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/283","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/283\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/283\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/283\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/283","id":641270439,"node_id":"MDU6SXNzdWU2NDEyNzA0Mzk=","number":283,"title":"Consistent formatting of citations","user":{"login":"srush","id":35882,"node_id":"MDQ6VXNlcjM1ODgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35882?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/srush","html_url":"https:\/\/github.com\/srush","followers_url":"https:\/\/api.github.com\/users\/srush\/followers","following_url":"https:\/\/api.github.com\/users\/srush\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/srush\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/srush\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/srush\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/srush\/orgs","repos_url":"https:\/\/api.github.com\/users\/srush\/repos","events_url":"https:\/\/api.github.com\/users\/srush\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/srush\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","rece
ived_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2020-06-18T14:48:45Z","updated_at":"2020-06-22T17:30:46Z","closed_at":"2020-06-22T17:30:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The citations are all of a different format, some have \"```\" and have text inside, others are proper bibtex. \r\n\r\nCan we make it so that they all are proper citations, i.e. parse by the bibtex spec:\r\n\r\nhttps:\/\/bibtexparser.readthedocs.io\/en\/master\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/283\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/283\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/282","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/282\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/282\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/282\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/282","id":641217759,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM2NDgxNzMy","number":282,"title":"Update dataset_info from gcs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-18T13:41:15Z","updated_at":"2020-06-18T16:24:52Z","closed_at":"2020-06-18T16:24:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/282","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/282","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/282.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/282.patch","merged_at":"2020-06-18T16:24:51Z"},"body":"Some datasets are hosted on gcs (wikipedia for example). In this PR I make sure that, when a user loads such datasets, the file_instructions are built using the dataset_info.json from gcs and not from the info extracted from the local `dataset_infos.json` (the one that contain the info for each config). Indeed local files may end up outdated.\r\n\r\nFurthermore, to avoid outdated dataset_infos.json, I now make sure that each time you run `load_dataset` it also tries to update the file locally.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/282\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/282\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/281","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/281\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/281\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/281\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/281","id":641067856,"node_id":"MDU6SXNzdWU2NDEwNjc4NTY=","number":281,"title":"Private\/sensitive data","user":{"login":"MFreidank","id":6368040,"node_id":"MDQ6VXNlcjYzNjgwNDA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6368040?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MFreidank","html_url":"https:\/\/github.com\/MFreidank","followers_url":"https:\/\/api.github.com\/users\/MFreidank\/followers","following_url":"https:\/\/api.github.com\/users\/MFreidank\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MFreidank\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MFreidank\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MFreidank\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MFreidank\/orgs","repos_url":"https:\/\/api.github.com\/users\/MFreidank\/repos","events_url":"https:\/\/api.github.com\/users\/MFreidank\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MFreidank\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-06-18T09:47:27Z","updated_at":"2020-06-20T13:15:12Z","closed_at":"2020-06-20T13:15:12Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi all,\r\nThanks for this fantastic library, it makes it very easy to do prototyping for NLP projects interchangeably between TF\/Pytorch. \r\n\r\nUnfortunately, there is data that cannot easily be shared publicly as it may contain sensitive information. \r\nIs there support\/a plan to support such data with NLP, e.g. by reading it from local sources?\r\n\r\nUse case flow could look like this: use NLP to prototype an approach on similar, public data and apply the resulting prototype on sensitive\/private data without the need to rethink data processing pipelines. \r\n\r\nMany thanks for your responses ahead of time and kind regards,\r\nMFreidank","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/281\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/281\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/280","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/280\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/280\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/280\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/280","id":640677615,"node_id":"MDU6SXNzdWU2NDA2Nzc2MTU=","number":280,"title":"Error with SquadV2 Metrics","user":{"login":"avinregmi","id":32203792,"node_id":"MDQ6VXNlcjMyMjAzNzky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32203792?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/avinregmi","html_url":"https:\/\/github.com\/avinregmi","followers_url":"https:\/\/api.github.com\/users\/avinregmi\/followers","following_url":"https:\/\/api.github.com\/users\/avinregmi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/avinregmi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/avinregmi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/avinregmi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/avinregmi\/orgs","repos_url":"https:\/\/api.github.com\/users\/avinregmi\/repos","events_url":"https:\/\/api.github.com\/users\/avinregmi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/avinregmi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-17T19:10:54Z","updated_at":"2020-06-19T08:33:41Z","closed_at":"2020-06-19T08:33:41Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I can't seem to import squad v2 metrics. 
\r\n\r\n**squad_metric = nlp.load_metric('squad_v2')**\r\n\r\n**This throws me an error.:**\r\n\r\n\r\n```\r\nImportError                               Traceback (most recent call last)\r\n in \r\n----> 1 squad_metric = nlp.load_metric('squad_v2')\r\n\r\n~\/env\/lib64\/python3.6\/site-packages\/nlp\/load.py in load_metric(path, name, process_id, num_process, data_dir, experiment_id, in_memory, download_config, **metric_init_kwargs)\r\n    426     \"\"\"\r\n    427     module_path = prepare_module(path, download_config=download_config, dataset=False)\r\n--> 428     metric_cls = import_main_class(module_path, dataset=False)\r\n    429     metric = metric_cls(\r\n    430         name=name,\r\n\r\n~\/env\/lib64\/python3.6\/site-packages\/nlp\/load.py in import_main_class(module_path, dataset)\r\n     55     \"\"\"\r\n     56     importlib.invalidate_caches()\r\n---> 57     module = importlib.import_module(module_path)\r\n     58 \r\n     59     if dataset:\r\n\r\n\/usr\/lib64\/python3.6\/importlib\/__init__.py in import_module(name, package)\r\n    124                 break\r\n    125             level += 1\r\n--> 126     return _bootstrap._gcd_import(name[level:], package, level)\r\n    127 \r\n    128 \r\n\r\n\/usr\/lib64\/python3.6\/importlib\/_bootstrap.py in _gcd_import(name, package, level)\r\n\r\n\/usr\/lib64\/python3.6\/importlib\/_bootstrap.py in _find_and_load(name, import_)\r\n\r\n\/usr\/lib64\/python3.6\/importlib\/_bootstrap.py in _find_and_load_unlocked(name, import_)\r\n\r\n\/usr\/lib64\/python3.6\/importlib\/_bootstrap.py in _load_unlocked(spec)\r\n\r\n\/usr\/lib64\/python3.6\/importlib\/_bootstrap_external.py in exec_module(self, module)\r\n\r\n\/usr\/lib64\/python3.6\/importlib\/_bootstrap.py in _call_with_frames_removed(f, *args, **kwds)\r\n\r\n~\/env\/lib64\/python3.6\/site-packages\/nlp\/metrics\/squad_v2\/a15e787c76889174874386d3def75321f0284c11730d2a57e28fe1352c9b5c7a\/squad_v2.py in \r\n     16 \r\n     17 import nlp\r\n---> 18 from .evaluate import evaluate\r\n     19 \r\n     20 _CITATION = \"\"\"\\\r\n\r\nImportError: cannot import name 'evaluate'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/280\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/280\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/279","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/279\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/279\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/279\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/279","id":640611692,"node_id":"MDU6SXNzdWU2NDA2MTE2OTI=","number":279,"title":"Dataset Preprocessing Cache with .map() function not working as expected","user":{"login":"sarahwie","id":8027676,"node_id":"MDQ6VXNlcjgwMjc2NzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8027676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sarahwie","html_url":"https:\/\/github.com\/sarahwie","followers_url":"https:\/\/api.github.com\/users\/sarahwie\/followers","following_url":"https:\/\/api.github.com\/users\/sarahwie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sarahwie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sarahwie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sarahwie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sarahwie\/orgs","repos_url":"https:\/\/api.github.com\/users\/sarahwie\/repos","events_url":"https:\/\/api.github.com\/users\/sarahwie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sarahwie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-06-17T17:17:21Z","updated_at":"2021-07-06T21:43:28Z","closed_at":"2021-04-18T23:43:49Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I've been having issues with reproducibility when loading and processing datasets with the `.map` function. I was only able to resolve them by clearing all of the cache files on my system. \r\n\r\nIs there a way to disable using the cache when processing a dataset? As I make minor processing changes on the same dataset, I want to be able to be certain the data is being re-processed rather than loaded from a cached file. \r\n\r\nCould you also help me understand a bit more about how the caching functionality is used for pre-processing? E.g. how is it determined when to load from a cache vs. reprocess. \r\nI was particularly having an issue where the correct dataset splits were loaded, but as soon as I applied the `.map()` function to each split independently, they somehow all exited this process having been converted to the test set.\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/279\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/279\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/278","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/278\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/278\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/278\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/278","id":640518917,"node_id":"MDU6SXNzdWU2NDA1MTg5MTc=","number":278,"title":"MemoryError when loading German Wikipedia","user":{"login":"gregburman","id":4698028,"node_id":"MDQ6VXNlcjQ2OTgwMjg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4698028?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gregburman","html_url":"https:\/\/github.com\/gregburman","followers_url":"https:\/\/api.github.com\/users\/gregburman\/followers","following_url":"https:\/\/api.github.com\/users\/gregburman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gregburman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gregburman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gregburman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gregburman\/orgs","repos_url":"https:\/\/api.github.com\/users\/gregburman\/repos","events_url":"https:\/\/api.github.com\/users\/gregburman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gregburman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-06-17T15:06:21Z","updated_at":"2020-06-19T12:53:02Z","closed_at":"2020-06-19T12:53:02Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, first off let me say thank you for all the awesome work you're doing at Hugging Face across all your projects (NLP, Transformers, Tokenizers) - they're all amazing contributions to us working with NLP models :)\r\n\r\nI'm trying to download the German Wikipedia dataset as follows:\r\n\r\n```\r\nwiki = nlp.load_dataset(\"wikipedia\", \"20200501.de\", split=\"train\")\r\n```\r\n\r\nHowever, when I do so, I get the following error:\r\n\r\n```\r\nDownloading and preparing dataset wikipedia\/20200501.de (download: Unknown size, generated: Unknown size, total: Unknown size) to \/home\/ubuntu\/.cache\/huggingface\/datasets\/wikipedia\/20200501.de\/1.0.0...\r\nTraceback (most recent call last):\r\n  File \"\", line 1, in \r\n  File \"\/home\/ubuntu\/anaconda3\/envs\/albert\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 520, in load_dataset\r\n    save_infos=save_infos,\r\n  File \"\/home\/ubuntu\/anaconda3\/envs\/albert\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 433, in download_and_prepare\r\n    dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n  File \"\/home\/ubuntu\/anaconda3\/envs\/albert\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 824, in _download_and_prepare\r\n    \"\\n\\t`{}`\".format(usage_example)\r\nnlp.builder.MissingBeamOptions: Trying to generate a dataset using Apache Beam, yet no Beam Runner or PipelineOptions() has been provided in `load_dataset` or in the builder arguments. For big datasets it has to run on large-scale data processing tools like Dataflow, Spark, etc. 
More information about Apache Beam runners at https:\/\/beam.apache.org\/documentation\/runners\/capability-matrix\/\r\nIf you really want to run it locally because you feel like the Dataset is small enough, you can use the local beam runner called `DirectRunner` (you may run out of memory). \r\nExample of usage: \r\n\t`load_dataset('wikipedia', '20200501.de', beam_runner='DirectRunner')`\r\n```\r\n\r\nSo, following on from the example usage at the bottom, I tried specifying `beam_runner='DirectRunner`, however when I do this after about 20 min after the data has all downloaded, I get a `MemoryError` as warned.\r\n\r\nThis isn't an issue for the English or French Wikipedia datasets (I've tried both), as neither seem to require that `beam_runner` be specified. Can you please clarify why this is an issue for the German dataset?\r\n\r\nMy nlp version is 0.2.1.\r\n\r\nThank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/278\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/278\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/277","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/277\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/277\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/277\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/277","id":640163053,"node_id":"MDU6SXNzdWU2NDAxNjMwNTM=","number":277,"title":"Empty samples in glue\/qqp","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-17T05:54:52Z","updated_at":"2020-06-21T00:21:45Z","closed_at":"2020-06-21T00:21:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\nqqp = nlp.load_dataset('glue', 'qqp')\r\nprint(qqp['train'][310121])\r\nprint(qqp['train'][362225])\r\n```\r\n```\r\n{'question1': 'How can I create an Android app?', 'question2': '', 'label': 0, 'idx': 310137}\r\n{'question1': 'How can I develop android app?', 'question2': '', 'label': 0, 'idx': 362246}\r\n```\r\nNotice that question 2 is empty string. \r\nBTW, I have checked and these two are the only naughty ones in all splits of qqp.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/277\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/277\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/276","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/276\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/276\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/276\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/276","id":639490858,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM1MDY5Nzg5","number":276,"title":"Fix metric compute (original_instructions missing)","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-16T08:52:01Z","updated_at":"2020-06-18T07:41:45Z","closed_at":"2020-06-18T07:41:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/276","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/276","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/276.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/276.patch","merged_at":"2020-06-18T07:41:43Z"},"body":"When loading arrow data we added in cc8d250 a way to specify the instructions that were used to store them with the loaded dataset.\r\nHowever metrics load data the same way but don't need instructions (we use one single file).\r\n\r\nIn this PR I just make `original_instructions` optional when reading files to load a `Dataset` object.\r\n\r\nThis should fix #269 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/276\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/276\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/275","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/275\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/275\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/275\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/275","id":639439052,"node_id":"MDU6SXNzdWU2Mzk0MzkwNTI=","number":275,"title":"NonMatchingChecksumError when loading pubmed dataset","user":{"login":"DavideStenner","id":48441753,"node_id":"MDQ6VXNlcjQ4NDQxNzUz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48441753?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/DavideStenner","html_url":"https:\/\/github.com\/DavideStenner","followers_url":"https:\/\/api.github.com\/users\/DavideStenner\/followers","following_url":"https:\/\/api.github.com\/users\/DavideStenner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/DavideStenner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/DavideStenner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/DavideStenner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/DavideStenner\/orgs","repos_url":"https:\/\/api.github.com\/users\/DavideStenner\/repos","events_url":"https:\/\/api.github.com\/users\/DavideStenner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/DavideStenner\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-16T07:31:51Z","updated_at":"2020-06-19T07:37:07Z","closed_at":"2020-06-19T07:37:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I get this error when i run `nlp.load_dataset('scientific_papers', 'pubmed', split = 'train[:50%]')`.\r\nThe error is:\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nNonMatchingChecksumError                  Traceback (most recent call last)\r\n in ()\r\n----> 1 df = nlp.load_dataset('scientific_papers', 'pubmed', split = 'train[:50%]')\r\n      2 df = pd.DataFrame(df)\r\n      3 gc.collect()\r\n\r\n3 frames\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n    518         download_mode=download_mode,\r\n    519         ignore_verifications=ignore_verifications,\r\n--> 520         save_infos=save_infos,\r\n    521     )\r\n    522 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n    431                 verify_infos = not save_infos and not ignore_verifications\r\n    432                 self._download_and_prepare(\r\n--> 433                     dl_manager=dl_manager, verify_infos=verify_infos, 
**download_and_prepare_kwargs\r\n    434                 )\r\n    435                 # Sync info\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n    468         # Checksums verification\r\n    469         if verify_infos:\r\n--> 470             verify_checksums(self.info.download_checksums, dl_manager.get_recorded_sizes_checksums())\r\n    471         for split_generator in split_generators:\r\n    472             if str(split_generator.split_info.name).lower() == \"all\":\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums)\r\n     34     bad_urls = [url for url in expected_checksums if expected_checksums[url] != recorded_checksums[url]]\r\n     35     if len(bad_urls) > 0:\r\n---> 36         raise NonMatchingChecksumError(str(bad_urls))\r\n     37     logger.info(\"All the checksums matched successfully.\")\r\n     38 \r\n\r\nNonMatchingChecksumError: ['https:\/\/drive.google.com\/uc?id=1b3rmCSIoh6VhD4HKWjI4HOW-cSwcwbeC&export=download', 'https:\/\/drive.google.com\/uc?id=1lvsqvsFi3W-pE1SqNZI0s8NR9rC1tsja&export=download']\r\n```\r\nI'm currently working on google colab.\r\n\r\nThat is quite strange because yesterday it was fine.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/275\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/275\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/274","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/274\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/274\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/274\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/274","id":639156625,"node_id":"MDU6SXNzdWU2MzkxNTY2MjU=","number":274,"title":"PG-19","user":{"login":"lucidrains","id":108653,"node_id":"MDQ6VXNlcjEwODY1Mw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/108653?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucidrains","html_url":"https:\/\/github.com\/lucidrains","followers_url":"https:\/\/api.github.com\/users\/lucidrains\/followers","following_url":"https:\/\/api.github.com\/users\/lucidrains\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucidrains\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucidrains\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucidrains\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucidrains\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucidrains\/repos","events_url":"https:\/\/api.github.com\/users\/lucidrains\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucidrains\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-06-15T21:02:26Z","updated_at":"2020-07-06T15:35:02Z","closed_at":"2020-07-06T15:35:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, and thanks for all your open-sourced work, as always!\r\n\r\nI was wondering if you would be open to adding PG-19 to your collection of datasets. https:\/\/github.com\/deepmind\/pg19 It is often used for benchmarking long-range language modeling.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/274\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/274\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/273","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/273\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/273\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/273\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/273","id":638968054,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM0NjM0MzU4","number":273,"title":"update cos_e to add cos_e v1.0","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-15T16:03:22Z","updated_at":"2020-06-16T08:25:54Z","closed_at":"2020-06-16T08:25:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/273","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/273","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/273.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/273.patch","merged_at":"2020-06-16T08:25:52Z"},"body":"This PR updates the cos_e dataset to add  v1.0 as requested here #163 \r\n@nazneenrajani","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/273\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/273\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/272","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/272\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/272\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/272\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/272","id":638307313,"node_id":"MDExOlB1bGxSZXF1ZXN0NDM0MTExOTQ3","number":272,"title":"asd","user":{"login":"sn696","id":66900970,"node_id":"MDQ6VXNlcjY2OTAwOTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/66900970?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sn696","html_url":"https:\/\/github.com\/sn696","followers_url":"https:\/\/api.github.com\/users\/sn696\/followers","following_url":"https:\/\/api.github.com\/users\/sn696\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sn696\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sn696\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sn696\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sn696\/orgs","repos_url":"https:\/\/api.github.com\/users\/sn696\/repos","events_url":"https:\/\/api.github.com\/users\/sn696\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sn696\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-14T08:20:38Z","updated_at":"2020-06-14T09:16:41Z","closed_at":"2020-06-14T09:16:41Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/272","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/272","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/272.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/272.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/272\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/272\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/271","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/271\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/271\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/271\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/271","id":638135754,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMzOTg3NDkw","number":271,"title":"Fix allocin\u00e9 dataset configuration","user":{"login":"TheophileBlard","id":37028092,"node_id":"MDQ6VXNlcjM3MDI4MDky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37028092?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TheophileBlard","html_url":"https:\/\/github.com\/TheophileBlard","followers_url":"https:\/\/api.github.com\/users\/TheophileBlard\/followers","following_url":"https:\/\/api.github.com\/users\/TheophileBlard\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TheophileBlard\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TheophileBlard\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TheophileBlard\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TheophileBlard\/orgs","repos_url":"https:\/\/api.github.com\/users\/TheophileBlard\/repos","events_url":"https:\/\/api.github.com\/users\/TheophileBlard\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TheophileBlard\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-06-13T10:12:10Z","updated_at":"2020-06-18T07:41:21Z","closed_at":"2020-06-18T07:41:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/271","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/271","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/271.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/271.patch","merged_at":null},"body":"This is a patch for #244. According to the [live nlp viewer](url), the Allocin\u00e9 dataset must be loaded with :\r\n```python\r\ndataset = load_dataset('allocine', 'allocine')\r\n```\r\nThis is redundant, as there is only one \"dataset configuration\", and should only be:\r\n```python\r\ndataset = load_dataset('allocine')\r\n```\r\n\r\nThis is my mistake, because the code for [`allocine.py`](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/datasets\/allocine\/allocine.py) was inspired by [`imdb.py`](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/datasets\/imdb\/imdb.py), which also force the user to specify the \"dataset configuration\" (even if there is only one).\r\n\r\nI believe this PR should solve this issue, making the Allocin\u00e9 dataset more convenient to use.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/271\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/271\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/270","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/270\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/270\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/270\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/270","id":638121617,"node_id":"MDU6SXNzdWU2MzgxMjE2MTc=","number":270,"title":"c4 dataset is not viewable in nlpviewer demo","user":{"login":"rajarsheem","id":6441313,"node_id":"MDQ6VXNlcjY0NDEzMTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6441313?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rajarsheem","html_url":"https:\/\/github.com\/rajarsheem","followers_url":"https:\/\/api.github.com\/users\/rajarsheem\/followers","following_url":"https:\/\/api.github.com\/users\/rajarsheem\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rajarsheem\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rajarsheem\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rajarsheem\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rajarsheem\/orgs","repos_url":"https:\/\/api.github.com\/users\/rajarsheem\/repos","events_url":"https:\/\/api.github.com\/users\/rajarsheem\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rajarsheem\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-13T08:26:16Z","updated_at":"2020-10-27T15:35:29Z","closed_at":"2020-10-27T15:35:13Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I get the following error when I try to view the c4 dataset in [nlpviewer](https:\/\/huggingface.co\/nlp\/viewer\/)\r\n\r\n```python\r\nModuleNotFoundError: No module named 'langdetect'\r\nTraceback:\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/streamlit\/ScriptRunner.py\", line 322, in _run_script\r\n    exec(code, module.__dict__)\r\nFile \"\/home\/sasha\/nlp_viewer\/run.py\", line 54, in \r\n    configs = get_confs(option.id)\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/streamlit\/caching.py\", line 591, in wrapped_func\r\n    return get_or_create_cached_value()\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/streamlit\/caching.py\", line 575, in get_or_create_cached_value\r\n    return_value = func(*args, **kwargs)\r\nFile \"\/home\/sasha\/nlp_viewer\/run.py\", line 48, in get_confs\r\n    builder_cls = nlp.load.import_main_class(module_path, dataset=True)\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 57, in import_main_class\r\n    module = importlib.import_module(module_path)\r\nFile \"\/usr\/lib\/python3.7\/importlib\/__init__.py\", line 127, in import_module\r\n    return _bootstrap._gcd_import(name[level:], package, level)\r\nFile \"\", line 1006, in _gcd_import\r\nFile \"\", line 983, in _find_and_load\r\nFile \"\", line 967, in _find_and_load_unlocked\r\nFile \"\", line 677, in 
_load_unlocked\r\nFile \"\", line 728, in exec_module\r\nFile \"\", line 219, in _call_with_frames_removed\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/nlp\/datasets\/c4\/88bb1b1435edad3fb772325710c4a43327cbf4a23b9030094556e6f01e14ec19\/c4.py\", line 29, in \r\n    from .c4_utils import (\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/nlp\/datasets\/c4\/88bb1b1435edad3fb772325710c4a43327cbf4a23b9030094556e6f01e14ec19\/c4_utils.py\", line 29, in \r\n    import langdetect\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/270\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/270\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/269","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/269\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/269\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/269\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/269","id":638106774,"node_id":"MDU6SXNzdWU2MzgxMDY3NzQ=","number":269,"title":"Error in metric.compute: missing `original_instructions` argument","user":{"login":"zphang","id":1668462,"node_id":"MDQ6VXNlcjE2Njg0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1668462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zphang","html_url":"https:\/\/github.com\/zphang","followers_url":"https:\/\/api.github.com\/users\/zphang\/followers","following_url":"https:\/\/api.github.com\/users\/zphang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zphang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zphang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zphang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zphang\/orgs","repos_url":"https:\/\/api.github.com\/users\/zphang\/repos","events_url":"https:\/\/api.github.com\/users\/zphang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zphang\/received_events","type":"User","site_admin":false},"labels":[{"id":2067393914,"node_id":"MDU6TGFiZWwyMDY3MzkzOTE0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20bug","name":"metric bug","color":"25b21e","default":false,"description":"A bug in a metric 
script"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2020-06-13T06:26:54Z","updated_at":"2020-06-18T07:41:44Z","closed_at":"2020-06-18T07:41:44Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm running into an error using metrics for computation in the latest master as well as version 0.2.1. Here is a minimal example:\r\n\r\n```python\r\nimport nlp\r\nrte_metric = nlp.load_metric('glue', name=\"rte\")\r\nrte_metric.compute(\r\n    [0, 0, 1, 1],\r\n    [0, 1, 0, 1],\r\n)\r\n```\r\n\r\n```\r\n    181             # Read the predictions and references\r\n    182             reader = ArrowReader(path=self.data_dir, info=None)\r\n--> 183             self.data = reader.read_files(node_files)\r\n    184 \r\n    185             # Release all of our locks\r\n\r\nTypeError: read_files() missing 1 required positional argument: 'original_instructions'\r\n```\r\n\r\nI believe this might have been introduced with cc8d2508b75f7ba0e5438d0686ee02dcec43c7f4, which added the `original_instructions` argument. Elsewhere, an empty-string default is provided--perhaps that could be done here too?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/269\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/269\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/268","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/268\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/268\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/268\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/268","id":637848056,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMzNzU5NzQ1","number":268,"title":"add Rotten Tomatoes Movie Review sentences sentiment dataset","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-12T15:53:59Z","updated_at":"2020-06-18T07:46:24Z","closed_at":"2020-06-18T07:46:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/268","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/268","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/268.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/268.patch","merged_at":"2020-06-18T07:46:23Z"},"body":"Sentence-level movie reviews v1.0 from here: http:\/\/www.cs.cornell.edu\/people\/pabo\/movie-review-data\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/268\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/268\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/267","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/267\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/267\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/267\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/267","id":637415545,"node_id":"MDU6SXNzdWU2Mzc0MTU1NDU=","number":267,"title":"How can I load\/find WMT en-romanian?","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.githu
b.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2020-06-12T01:09:37Z","updated_at":"2020-06-19T08:24:19Z","closed_at":"2020-06-19T08:24:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I believe it is from `wmt16`\r\n\r\nWhen I run\r\n\r\n```python\r\nwmt = nlp.load_dataset('wmt16')\r\n```\r\nI get:\r\n```python\r\nAssertionError: The dataset wmt16 with config cs-en requires manual data. \r\n Please follow the manual download instructions:   Some of the wmt configs here, require a manual download.\r\n  Please look into wmt.py to see the exact path (and file name) that has to\r\n  be downloaded.\r\n  . \r\n Manual data can be loaded with `nlp.load(wmt16, data_dir='')\r\n```\r\nThere is no wmt.py, contrary to what the error message suggests, and wmt16.py doesn't have manual download instructions.\r\n\r\nAny idea how to do this?\r\n\r\nThanks in advance!\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/267\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/267\/timeline","performed_via_github_app":null}
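The en-ro pair the issue asks about is exposed as a language-pair config of `wmt16` rather than as a separate dataset. A minimal sketch, assuming the `ro-en` config is available in this version of `nlp` and is not one of the manual-download configs mentioned in the error:

```python
import nlp

# Load only the Romanian-English pair of WMT16 by naming the config explicitly.
# 'ro-en' is an assumed config name; the exact pair names live in the wmt16 script.
wmt_ro_en = nlp.load_dataset("wmt16", "ro-en")
print(wmt_ro_en["train"][0])  # a paired en/ro translation example, or similar
```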
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/266","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/266\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/266\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/266\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/266","id":637156392,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMzMTk1NDgw","number":266,"title":"Add sort, shuffle, test_train_split and select methods","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-06-11T16:22:20Z","updated_at":"2020-06-18T16:23:25Z","closed_at":"2020-06-18T16:23:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/266","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/266","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/266.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/266.patch","merged_at":"2020-06-18T16:23:23Z"},"body":"Add a bunch of methods to reorder\/split\/select rows in a dataset:\r\n- `dataset.select(indices)`: Create a new dataset with rows selected following the list\/array of indices (which can have a different size than the dataset and contain duplicated indices, the only constrain is that all the integers in the list must be smaller than the dataset size, otherwise we're indexing outside the dataset...)\r\n- `dataset.sort(column_name)`: sort a dataset according to a column (has to be a column with a numpy compatible type)\r\n- `dataset.shuffle(seed)`: shuffle a dataset rows\r\n- `dataset.train_test_split(test_size, train_size)`: Return a dictionary with two random train and test subsets (`train` and `test` ``Dataset`` splits)\r\n\r\nAll these methods are **not** in-place which means they return new ``Dataset``.\r\nThis is the default behavior in the library.\r\n\r\nFix #147 #166 #259 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/266\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/266\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/265","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/265\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/265\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/265\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/265","id":637139220,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMzMTgxNDMz","number":265,"title":"Add pyarrow warning colab","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-11T15:57:51Z","updated_at":"2020-08-02T18:14:36Z","closed_at":"2020-06-12T08:14:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/265","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/265","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/265.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/265.patch","merged_at":"2020-06-12T08:14:16Z"},"body":"When a user installs `nlp` on google colab, then google colab doesn't update pyarrow, and the runtime needs to be restarted to use the updated version of pyarrow.\r\n\r\nThis is an issue because `nlp` requires the updated version to work correctly.\r\n\r\nIn this PR I added en error that is shown to the user in google colab if the user tries to `import nlp` without having restarted the runtime. The error tells the user to restart the runtime.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/265\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/265\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/264","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/264\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/264\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/264\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/264","id":637106170,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMzMTU0ODQ4","number":264,"title":"Fix small issues creating dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-11T15:20:16Z","updated_at":"2020-06-12T08:15:57Z","closed_at":"2020-06-12T08:15:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/264","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/264","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/264.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/264.patch","merged_at":"2020-06-12T08:15:56Z"},"body":"Fix many small issues mentioned in #249:\r\n- don't force to install apache beam for commands\r\n- fix None cache dir when using `dl_manager.download_custom`\r\n- added new extras in `setup.py` named `dev` that contains tests and quality dependencies\r\n- mock dataset sizes when running tests with dummy data\r\n- add a note about the naming convention of datasets (camel case - snake case) in CONTRIBUTING.md\r\n\r\nThis should help users create their datasets.\r\nNext step is the `add_dataset.md` docs :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/264\/reactions","total_count":2,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/264\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/263","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/263\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/263\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/263\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/263","id":637028015,"node_id":"MDU6SXNzdWU2MzcwMjgwMTU=","number":263,"title":"[Feature request] Support for external modality for language datasets","user":{"login":"aleSuglia","id":1479733,"node_id":"MDQ6VXNlcjE0Nzk3MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1479733?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aleSuglia","html_url":"https:\/\/github.com\/aleSuglia","followers_url":"https:\/\/api.github.com\/users\/aleSuglia\/followers","following_url":"https:\/\/api.github.com\/users\/aleSuglia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aleSuglia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aleSuglia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aleSuglia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aleSuglia\/orgs","repos_url":"https:\/\/api.github.com\/users\/aleSuglia\/repos","events_url":"https:\/\/api.github.com\/users\/aleSuglia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aleSuglia\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-06-11T13:42:18Z","updated_at":"2022-02-10T13:26:35Z","closed_at":"2022-02-10T13:26:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"# Background\r\n\r\nIn recent years many researchers have advocated that learning meanings from text-based only datasets is just like asking a human to \"learn to speak by listening to the radio\" [[E. Bender and A. Koller,2020](https:\/\/openreview.net\/forum?id=GKTvAcb12b), [Y. Bisk et. al, 2020](https:\/\/arxiv.org\/abs\/2004.10151)]. Therefore, the importance of multi-modal datasets for the NLP community is of paramount importance for next-generation models. For this reason, I raised a [concern](https:\/\/github.com\/huggingface\/nlp\/pull\/236#issuecomment-639832029) related to the best way to integrate external features in NLP datasets (e.g., visual features associated with an image, audio features associated with a recording, etc.). This would be of great importance for a more systematic way of representing data for ML models that are learning from multi-modal data. 
\r\n\r\n# Language + Vision\r\n\r\n## Use case\r\nTypically, people working on Language+Vision tasks, have a reference dataset (either in JSON or JSONL format) and for each example, they have an identifier that specifies the reference image. For a practical example, you can refer to the [GQA](https:\/\/cs.stanford.edu\/people\/dorarad\/gqa\/download.html#seconddown) dataset.\r\n\r\nCurrently, images are represented by either pooling-based features (average pooling of ResNet or VGGNet features, see [DeVries et.al, 2017](https:\/\/arxiv.org\/abs\/1611.08481), [Shekhar et.al, 2019](https:\/\/www.aclweb.org\/anthology\/N19-1265.pdf)) where you have a single vector for every image. Another option is to use a set of feature maps for every image extracted from a specific layer of a CNN (see [Xu et.al, 2015](https:\/\/arxiv.org\/abs\/1502.03044)). A more recent option, especially with large-scale multi-modal transformers [Li et. al, 2019](https:\/\/arxiv.org\/abs\/1908.03557), is to use FastRCNN features. \r\n\r\nFor all these types of features, people use one of the following formats:\r\n1. [HD5F](https:\/\/pypi.org\/project\/h5py\/)\r\n2. [NumPy](https:\/\/numpy.org\/doc\/stable\/reference\/generated\/numpy.savez.html)\r\n3. [LMDB](https:\/\/lmdb.readthedocs.io\/en\/release\/)\r\n\r\n## Implementation considerations\r\n\r\nI was thinking about possible ways of implementing this feature. As mentioned above, depending on the model, different visual features can be used. This step usually relies on another model (say ResNet-101) that is used to generate the visual features for each image used in the dataset. Typically, this step is done in a separate script that completes the feature generation procedure. The usual processing steps for these datasets are the following:\r\n\r\n1. Download dataset\r\n2. Download images associated with the dataset\r\n3. Write a script that generates the visual features for every image and store them in a specific file\r\n4. Create a DataLoader that maps the visual features to the corresponding language example\r\n\r\nIn my personal projects, I've decided to ignore HD5F because it doesn't have out-of-the-box support for multi-processing (see this PyTorch [issue](https:\/\/github.com\/pytorch\/pytorch\/issues\/11929)). I've been successfully using a NumPy compressed file for each image so that I can store any sort of information in it.\r\n\r\nFor ease of use of all these Language+Vision datasets, it would be really handy to have a way to associate the visual features with the text and store them in an efficient way. That's why I immediately thought about the HuggingFace NLP backend based on Apache Arrow. The assumption here is that the external modality will be mapped to a N-dimensional tensor so easily represented by a NumPy array. \r\n\r\nLooking forward to hearing your thoughts about it!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/263\/reactions","total_count":23,"+1":18,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":1,"rocket":0,"eyes":4},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/263\/timeline","performed_via_github_app":null}
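To make the NumPy-per-image approach described above concrete, here is a small self-contained sketch (the file layout, array shapes and key names are made up for illustration): one compressed `.npz` file per image holds the pre-extracted visual features, and a language example only stores the image identifier needed to look them up.

```python
import os

import numpy as np

os.makedirs("features", exist_ok=True)

def save_visual_features(image_id, feature_map):
    # feature_map could be e.g. a (num_regions, feature_dim) FastRCNN output
    np.savez_compressed(f"features/{image_id}.npz", features=feature_map)

def load_visual_features(image_id):
    with np.load(f"features/{image_id}.npz") as data:
        return data["features"]

# Toy example: one "image" with random region features, one language example.
save_visual_features("n161313", np.random.rand(36, 2048).astype("float32"))
example = {"question": "What color is the car?", "image_id": "n161313"}
example["visual_features"] = load_visual_features(example["image_id"]).tolist()
print(len(example["visual_features"]), len(example["visual_features"][0]))  # 36 regions x 2048 dims
```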
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/262","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/262\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/262\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/262\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/262","id":636702849,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMyODI3Mzcz","number":262,"title":"Add new dataset ANLI Round 1","user":{"login":"easonnie","id":11016329,"node_id":"MDQ6VXNlcjExMDE2MzI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11016329?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/easonnie","html_url":"https:\/\/github.com\/easonnie","followers_url":"https:\/\/api.github.com\/users\/easonnie\/followers","following_url":"https:\/\/api.github.com\/users\/easonnie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/easonnie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/easonnie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/easonnie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/easonnie\/orgs","repos_url":"https:\/\/api.github.com\/users\/easonnie\/repos","events_url":"https:\/\/api.github.com\/users\/easonnie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/easonnie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-11T04:14:57Z","updated_at":"2020-06-12T22:03:03Z","closed_at":"2020-06-12T22:03:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/262","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/262","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/262.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/262.patch","merged_at":null},"body":"Adding new dataset [ANLI](https:\/\/github.com\/facebookresearch\/anli\/).\r\n\r\nI'm not familiar with how to add new dataset. Let me know if there is any issue. I only include round 1 data here. There will be round 2, round 3 and more in the future with potentially different format. I think it will be better to separate them.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/262\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/262\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/261","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/261\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/261\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/261\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/261","id":636372380,"node_id":"MDU6SXNzdWU2MzYzNzIzODA=","number":261,"title":"Downloading dataset error with pyarrow.lib.RecordBatch","user":{"login":"cuent","id":5248968,"node_id":"MDQ6VXNlcjUyNDg5Njg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5248968?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cuent","html_url":"https:\/\/github.com\/cuent","followers_url":"https:\/\/api.github.com\/users\/cuent\/followers","following_url":"https:\/\/api.github.com\/users\/cuent\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cuent\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cuent\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cuent\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cuent\/orgs","repos_url":"https:\/\/api.github.com\/users\/cuent\/repos","events_url":"https:\/\/api.github.com\/users\/cuent\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cuent\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-10T16:04:19Z","updated_at":"2020-06-11T14:35:12Z","closed_at":"2020-06-11T14:35:12Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to download `sentiment140` and I have the following error\r\n\r\n```\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n    518         download_mode=download_mode,\r\n    519         ignore_verifications=ignore_verifications,\r\n--> 520         save_infos=save_infos,\r\n    521     )\r\n    522 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n    418                 verify_infos = not save_infos and not ignore_verifications\r\n    419                 self._download_and_prepare(\r\n--> 420                     dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n    421                 )\r\n    422                 # Sync info\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n    472             try:\r\n    473                 # Prepare split will record examples associated to the split\r\n--> 474                 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n    475             except OSError:\r\n    476                 raise OSError(\"Cannot find data file. 
\" + (self.MANUAL_DOWNLOAD_INSTRUCTIONS or \"\"))\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in _prepare_split(self, split_generator)\r\n    652         for key, record in utils.tqdm(generator, unit=\" examples\", total=split_info.num_examples, leave=False):\r\n    653             example = self.info.features.encode_example(record)\r\n--> 654             writer.write(example)\r\n    655         num_examples, num_bytes = writer.finalize()\r\n    656 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/arrow_writer.py in write(self, example, writer_batch_size)\r\n    143             self._build_writer(pa_table=pa.Table.from_pydict(example))\r\n    144         if writer_batch_size is not None and len(self.current_rows) >= writer_batch_size:\r\n--> 145             self.write_on_file()\r\n    146 \r\n    147     def write_batch(\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/arrow_writer.py in write_on_file(self)\r\n    127             else:\r\n    128                 # All good\r\n--> 129                 self._write_array_on_file(pa_array)\r\n    130             self.current_rows = []\r\n    131 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/arrow_writer.py in _write_array_on_file(self, pa_array)\r\n     96     def _write_array_on_file(self, pa_array):\r\n     97         \"\"\"Write a PyArrow Array\"\"\"\r\n---> 98         pa_batch = pa.RecordBatch.from_struct_array(pa_array)\r\n     99         self._num_bytes += pa_array.nbytes\r\n    100         self.pa_writer.write_batch(pa_batch)\r\n\r\nAttributeError: type object 'pyarrow.lib.RecordBatch' has no attribute 'from_struct_array'\r\n```\r\n\r\nI installed the last version and ran the following command:\r\n\r\n```python\r\nimport nlp\r\nsentiment140 = nlp.load_dataset('sentiment140', cache_dir='\/content')\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/261\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/261\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/260","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/260\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/260\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/260\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/260","id":636261118,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMyNDY3NDM5","number":260,"title":"Consistency fixes","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-10T13:44:42Z","updated_at":"2020-06-11T10:34:37Z","closed_at":"2020-06-11T10:34:36Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/260","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/260","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/260.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/260.patch","merged_at":"2020-06-11T10:34:36Z"},"body":"A few bugs I've found while hacking","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/260\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/260\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/259","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/259\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/259\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/259\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/259","id":636239529,"node_id":"MDU6SXNzdWU2MzYyMzk1Mjk=","number":259,"title":"documentation missing how to split a dataset","user":{"login":"fotisj","id":2873355,"node_id":"MDQ6VXNlcjI4NzMzNTU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2873355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/fotisj","html_url":"https:\/\/github.com\/fotisj","followers_url":"https:\/\/api.github.com\/users\/fotisj\/followers","following_url":"https:\/\/api.github.com\/users\/fotisj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/fotisj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/fotisj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/fotisj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/fotisj\/orgs","repos_url":"https:\/\/api.github.com\/users\/fotisj\/repos","events_url":"https:\/\/api.github.com\/users\/fotisj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/fotisj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-06-10T13:18:13Z","updated_at":"2020-06-18T22:20:24Z","closed_at":"2020-06-18T22:20:24Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I am trying to understand how to split a dataset ( as arrow_dataset). \r\nI know I can do something like this to access a split which is already in the original dataset : \r\n\r\n`ds_test = nlp.load_dataset('imdb, split='test') `\r\n\r\nBut how can I split ds_test into a test and a validation set (without reading the data into memory and keeping the arrow_dataset as container)?\r\nI guess it has something to do with the module split :-) but there is no real documentation in the code but only a reference to a longer description: \r\n\r\n> See the  [guide on splits](https:\/\/github.com\/huggingface\/nlp\/tree\/master\/docs\/splits.md)  for more information.\r\n\r\nBut the guide seems to be missing.\r\n\r\nTo clarify: I know that this has been modelled after the dataset of tensorflow and that some of the documentation there can be used [like this one](https:\/\/www.tensorflow.org\/datasets\/splits). But to come back to the example above: I cannot simply split the testset doing this: \r\n`ds_test = nlp.load_dataset('imdb, split='test'[:5000]) `\r\n`ds_val = nlp.load_dataset('imdb, split='test'[5000:])`\r\n\r\nbecause the imdb test data is sorted by class (probably not a good idea anyway)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/259\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/259\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/258","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/258\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/258\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/258\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/258","id":635859525,"node_id":"MDU6SXNzdWU2MzU4NTk1MjU=","number":258,"title":"Why is dataset after tokenization far more larger than the orginal one ?","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-06-10T01:27:07Z","updated_at":"2020-06-10T12:46:34Z","closed_at":"2020-06-10T12:46:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I tokenize wiki dataset by `map` and cache the results.\r\n```\r\ndef tokenize_tfm(example):\r\n    example['input_ids'] = hf_fast_tokenizer.convert_tokens_to_ids(hf_fast_tokenizer.tokenize(example['text']))\r\n    return example\r\nwiki = nlp.load_dataset('wikipedia', '20200501.en', cache_dir=cache_dir)['train']\r\nwiki.map(tokenize_tfm, cache_file_name=cache_dir\/\"wikipedia\/20200501.en\/1.0.0\/tokenized_wiki.arrow\")\r\n```\r\nand when I see their size\r\n```\r\nls -l --block-size=M\r\n17460M  wikipedia-train.arrow\r\n47511M  tokenized_wiki.arrow\r\n```\r\nThe tokenized one is over 2x size of original one.\r\nIs there something I did wrong ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/258\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/258\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/257","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/257\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/257\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/257\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/257","id":635620979,"node_id":"MDU6SXNzdWU2MzU2MjA5Nzk=","number":257,"title":"Tokenizer pickling issue fix not landed in `nlp` yet?","user":{"login":"sarahwie","id":8027676,"node_id":"MDQ6VXNlcjgwMjc2NzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8027676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sarahwie","html_url":"https:\/\/github.com\/sarahwie","followers_url":"https:\/\/api.github.com\/users\/sarahwie\/followers","following_url":"https:\/\/api.github.com\/users\/sarahwie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sarahwie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sarahwie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sarahwie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sarahwie\/orgs","repos_url":"https:\/\/api.github.com\/users\/sarahwie\/repos","events_url":"https:\/\/api.github.com\/users\/sarahwie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sarahwie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-09T17:12:34Z","updated_at":"2020-06-10T21:45:32Z","closed_at":"2020-06-09T17:26:53Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Unless I recreate an arrow_dataset from my loaded nlp dataset myself (which I think does not use the cache by default), I get the following error when applying the map function:\r\n\r\n```\r\ndataset = nlp.load_dataset('cos_e')\r\ntokenizer = GPT2TokenizerFast.from_pretrained('gpt2', cache_dir=cache_dir)\r\n\r\nfor split in dataset.keys():\r\n    dataset[split].map(lambda x: some_function(x, tokenizer))\r\n```\r\n```\r\n06\/09\/2020 10:09:19 - INFO - nlp.builder -   Constructing Dataset for split train[:10], from \/home\/sarahw\/.cache\/huggingface\/datasets\/cos_e\/default\/0.0.1\r\nTraceback (most recent call last):\r\n  File \"generation\/input_to_label_and_rationale.py\", line 390, in \r\n    main()\r\n  File \"generation\/input_to_label_and_rationale.py\", line 263, in main\r\n    dataset[split] = dataset[split].map(lambda x: input_to_explanation_plus_label(x, tokenizer, max_length, datasource=data_args.task_name, wt5=(model_class=='t5'), expl_only=model_args.rationale_only), batched=False)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/site-packages\/nlp\/arrow_dataset.py\", line 522, in map\r\n    cache_file_name = self._get_cache_file_path(function, cache_kwargs)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/site-packages\/nlp\/arrow_dataset.py\", line 381, in _get_cache_file_path\r\n    function_bytes = dumps(function)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/site-packages\/nlp\/utils\/py_utils.py\", line 257, in dumps\r\n    dump(obj, file)\r\n  File 
\"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/site-packages\/nlp\/utils\/py_utils.py\", line 250, in dump\r\n    Pickler(file).dump(obj)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/site-packages\/dill\/_dill.py\", line 445, in dump\r\n    StockPickler.dump(self, obj)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 485, in dump\r\n    self.save(obj)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 558, in save\r\n    f(self, obj)  # Call unbound method with explicit self\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/site-packages\/dill\/_dill.py\", line 1410, in save_function\r\n    pickler.save_reduce(_create_function, (obj.__code__,\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 690, in save_reduce\r\n    save(args)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 558, in save\r\n    f(self, obj)  # Call unbound method with explicit self\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 899, in save_tuple\r\n    save(element)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 558, in save\r\n    f(self, obj)  # Call unbound method with explicit self\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 899, in save_tuple\r\n    save(element)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 558, in save\r\n    f(self, obj)  # Call unbound method with explicit self\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/site-packages\/dill\/_dill.py\", line 1147, in save_cell\r\n    pickler.save_reduce(_create_cell, (f,), obj=obj)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 690, in save_reduce\r\n    save(args)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 558, in save\r\n    f(self, obj)  # Call unbound method with explicit self\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 884, in save_tuple\r\n    save(element)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 601, in save\r\n    self.save_reduce(obj=obj, *rv)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 715, in save_reduce\r\n    save(state)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 558, in save\r\n    f(self, obj)  # Call unbound method with explicit self\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/site-packages\/dill\/_dill.py\", line 912, in save_module_dict\r\n    StockPickler.save_dict(pickler, obj)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 969, in save_dict\r\n    self._batch_setitems(obj.items())\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 995, in _batch_setitems\r\n    save(v)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 601, in save\r\n    
self.save_reduce(obj=obj, *rv)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 715, in save_reduce\r\n    save(state)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 558, in save\r\n    f(self, obj)  # Call unbound method with explicit self\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/site-packages\/dill\/_dill.py\", line 912, in save_module_dict\r\n    StockPickler.save_dict(pickler, obj)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 969, in save_dict\r\n    self._batch_setitems(obj.items())\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 995, in _batch_setitems\r\n    save(v)\r\n  File \"\/home\/sarahw\/miniconda3\/envs\/project_huggingface\/lib\/python3.8\/pickle.py\", line 576, in save\r\n    rv = reduce(self.proto)\r\nTypeError: cannot pickle 'Tokenizer' object\r\n```\r\nFix seems to be in the tokenizers [`0.8.0.dev1 pre-release`](https:\/\/github.com\/huggingface\/tokenizers\/issues\/87), which I can't install with any package managers. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/257\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/257\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/256","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/256\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/256\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/256\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/256","id":635596295,"node_id":"MDU6SXNzdWU2MzU1OTYyOTU=","number":256,"title":"[Feature request] Add a feature to dataset","user":{"login":"sarahwie","id":8027676,"node_id":"MDQ6VXNlcjgwMjc2NzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8027676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sarahwie","html_url":"https:\/\/github.com\/sarahwie","followers_url":"https:\/\/api.github.com\/users\/sarahwie\/followers","following_url":"https:\/\/api.github.com\/users\/sarahwie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sarahwie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sarahwie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sarahwie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sarahwie\/orgs","repos_url":"https:\/\/api.github.com\/users\/sarahwie\/repos","events_url":"https:\/\/api.github.com\/users\/sarahwie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sarahwie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-06-09T16:38:12Z","updated_at":"2020-06-09T16:51:42Z","closed_at":"2020-06-09T16:51:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Is there a straightforward way to add a field to the arrow_dataset, prior to performing map?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/256\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/256\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/255","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/255\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/255\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/255\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/255","id":635300822,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMxNjg3MDM0","number":255,"title":"Add dataset\/piaf","user":{"login":"RachelKer","id":36986299,"node_id":"MDQ6VXNlcjM2OTg2Mjk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36986299?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/RachelKer","html_url":"https:\/\/github.com\/RachelKer","followers_url":"https:\/\/api.github.com\/users\/RachelKer\/followers","following_url":"https:\/\/api.github.com\/users\/RachelKer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/RachelKer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/RachelKer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/RachelKer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/RachelKer\/orgs","repos_url":"https:\/\/api.github.com\/users\/RachelKer\/repos","events_url":"https:\/\/api.github.com\/users\/RachelKer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/RachelKer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-09T10:16:01Z","updated_at":"2020-06-12T08:31:27Z","closed_at":"2020-06-12T08:31:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/255","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/255","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/255.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/255.patch","merged_at":"2020-06-12T08:31:27Z"},"body":"Small SQuAD-like French QA dataset [PIAF](https:\/\/www.aclweb.org\/anthology\/2020.lrec-1.673.pdf)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/255\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/255\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/254","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/254\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/254\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/254\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/254","id":635057568,"node_id":"MDU6SXNzdWU2MzUwNTc1Njg=","number":254,"title":"[Feature request] Be able to remove a specific sample of the dataset","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-09T02:22:13Z","updated_at":"2020-06-09T08:41:38Z","closed_at":"2020-06-09T08:41:38Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"As mentioned in #117, it's currently not possible to remove a sample of the dataset.\r\n\r\nBut it is a important use case : After applying some preprocessing, some samples might be empty for example. We should be able to remove these samples from the dataset, or at least mark them as `removed` so when iterating the dataset, we don't iterate these samples.\r\n\r\nI think it should be a feature. What do you think ?\r\n\r\n---\r\n\r\nAny work-around in the meantime ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/254\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/254\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/253","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/253\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/253\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/253\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/253","id":634791939,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMxMjgwOTYz","number":253,"title":"add flue dataset","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-06-08T17:11:09Z","updated_at":"2020-07-16T07:50:59Z","closed_at":"2020-07-16T07:50:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/253","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/253","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/253.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/253.patch","merged_at":null},"body":"This PR add the Flue dataset as requested in this issue #223  . @lbourdois made  a detailed description in that issue.\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/253\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/253\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/252","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/252\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/252\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/252\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/252","id":634563239,"node_id":"MDU6SXNzdWU2MzQ1NjMyMzk=","number":252,"title":"NonMatchingSplitsSizesError error when reading the IMDB dataset","user":{"login":"antmarakis","id":17463361,"node_id":"MDQ6VXNlcjE3NDYzMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17463361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/antmarakis","html_url":"https:\/\/github.com\/antmarakis","followers_url":"https:\/\/api.github.com\/users\/antmarakis\/followers","following_url":"https:\/\/api.github.com\/users\/antmarakis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/antmarakis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/antmarakis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/antmarakis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/antmarakis\/orgs","repos_url":"https:\/\/api.github.com\/users\/antmarakis\/repos","events_url":"https:\/\/api.github.com\/users\/antmarakis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/antmarakis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-06-08T12:26:24Z","updated_at":"2021-08-27T15:20:58Z","closed_at":"2020-06-08T14:01:26Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi!\r\n\r\nI am trying to load the `imdb` dataset with this line:\r\n\r\n`dataset = nlp.load_dataset('imdb', data_dir='\/A\/PATH', cache_dir='\/A\/PATH')`\r\n\r\nbut I am getting the following error:\r\n\r\n```\r\nTraceback (most recent call last):\r\n  File \"\", line 1, in \r\n  File \"\/mounts\/Users\/cisintern\/antmarakis\/anaconda3\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 517, in load_dataset\r\n    save_infos=save_infos,\r\n  File \"\/mounts\/Users\/cisintern\/antmarakis\/anaconda3\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 363, in download_and_prepare\r\n    dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n  File \"\/mounts\/Users\/cisintern\/antmarakis\/anaconda3\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 421, in _download_and_prepare\r\n    verify_splits(self.info.splits, split_dict)\r\n  File \"\/mounts\/Users\/cisintern\/antmarakis\/anaconda3\/lib\/python3.7\/site-packages\/nlp\/utils\/info_utils.py\", line 70, in verify_splits\r\n    raise NonMatchingSplitsSizesError(str(bad_splits))\r\nnlp.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=33442202, num_examples=25000, dataset_name='imdb'), 'recorded': SplitInfo(name='train', num_bytes=5929447, num_examples=4537, dataset_name='imdb')}, {'expected': SplitInfo(name='unsupervised', num_bytes=67125548, num_examples=50000, dataset_name='imdb'), 'recorded': SplitInfo(name='unsupervised', num_bytes=0, num_examples=0, dataset_name='imdb')}]\r\n```\r\n\r\nAm I overlooking something? 
Thanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/252\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/252\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/251","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/251\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/251\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/251\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/251","id":634544977,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMxMDgwMDkw","number":251,"title":"Better access to all dataset information","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-08T11:56:50Z","updated_at":"2020-06-12T08:13:00Z","closed_at":"2020-06-12T08:12:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/251","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/251","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/251.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/251.patch","merged_at":"2020-06-12T08:12:58Z"},"body":"Moves all the dataset info down one level from `dataset.info.XXX` to `dataset.XXX`\r\nThis way it's easier to access `dataset.feature['label']` for instance\r\n\r\nAlso, add the original split instructions used to create the dataset in `dataset.split`\r\nEx:\r\n```\r\nfrom nlp import load_dataset\r\nstsb = load_dataset('glue', name='stsb', split='train')\r\nstsb.split\r\n>>> NamedSplit('train')\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/251\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/251\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/250","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/250\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/250\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/250\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/250","id":634416751,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMwOTcyMzg4","number":250,"title":"Remove checksum download in c4","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-08T09:13:00Z","updated_at":"2020-08-25T07:04:56Z","closed_at":"2020-06-08T09:16:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/250","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/250","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/250.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/250.patch","merged_at":"2020-06-08T09:16:59Z"},"body":"There was a line from the original tfds script that was still there and causing issues when loading the c4 script. This one should fix #233 and allow anyone to load the c4 script to generate the dataset","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/250\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/250\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/249","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/249\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/249\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/249\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/249","id":633393443,"node_id":"MDU6SXNzdWU2MzMzOTM0NDM=","number":249,"title":"[Dataset created] some critical small issues when I was creating a dataset","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received
_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2020-06-07T12:58:54Z","updated_at":"2020-06-12T08:28:51Z","closed_at":"2020-06-12T08:28:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I successfully created a dataset and has made a pr #248.\r\nBut I have encountered several problems when I was creating it, and those should be easy to fix.\r\n\r\n1. Not found dataset_info.json\r\nshould be fixed by #241 , eager to wait it be merged.\r\n\r\n2. Forced to install `apach_beam`\r\nIf we should install it, then it might be better to include it in the pakcage dependency or specified in `CONTRIBUTING.md`\r\n```\r\nTraceback (most recent call last):\r\n  File \"nlp-cli\", line 10, in \r\n    from nlp.commands.run_beam import RunBeamCommand\r\n  File \"\/home\/yisiang\/nlp\/src\/nlp\/commands\/run_beam.py\", line 6, in \r\n    import apache_beam as beam\r\nModuleNotFoundError: No module named 'apache_beam'\r\n```\r\n\r\n3.  `cached_dir` is `None`\r\n```\r\nFile \"\/home\/yisiang\/nlp\/src\/nlp\/datasets\/bookscorpus\/aea0bd5142d26df645a8fce23d6110bb95ecb81772bb2a1f29012e329191962c\/bookscorpus.py\", line 88, in _split_generators\r\n    downloaded_path_or_paths = dl_manager.download_custom(_GDRIVE_FILE_ID, download_file_from_google_drive)\r\n  File \"\/home\/yisiang\/nlp\/src\/nlp\/utils\/download_manager.py\", line 128, in download_custom\r\n    downloaded_path_or_paths = map_nested(url_to_downloaded_path, url_or_urls)\r\n  File \"\/home\/yisiang\/nlp\/src\/nlp\/utils\/py_utils.py\", line 172, in map_nested\r\n    return function(data_struct)\r\n  File \"\/home\/yisiang\/nlp\/src\/nlp\/utils\/download_manager.py\", line 126, in url_to_downloaded_path\r\n    return os.path.join(self._download_config.cache_dir, hash_url_to_filename(url))\r\n  File \"\/home\/yisiang\/miniconda3\/envs\/nlppr\/lib\/python3.7\/posixpath.py\", line 80, in join\r\n    a = os.fspath(a)\r\n```\r\nThis is because this line\r\nhttps:\/\/github.com\/huggingface\/nlp\/blob\/2e0a8639a79b1abc848cff5c669094d40bba0f63\/src\/nlp\/commands\/test.py#L30-L32\r\nAnd I add `--cache_dir=\"....\"` to `python nlp-cli test datasets\/ --save_infos --all_configs`  in the doc, finally I could pass this error.\r\nBut it seems to ignore my arg and use `\/home\/yisiang\/.cache\/huggingface\/datasets\/bookscorpus\/plain_text\/1.0.0` as cahe_dir\r\n\r\n4. There is no `pytest`\r\nSo maybe in the doc we should specify a step to install pytest\r\n\r\n5. Not enough capacity in my `\/tmp`\r\nWhen run test for dummy data, I don't know why it ask me for 5.6g to download something, \r\n```\r\ndef download_and_prepare\r\n...\r\nif not utils.has_sufficient_disk_space(self.info.size_in_bytes or 0, directory=self._cache_dir_root):\r\n                raise IOError(\r\n                    \"Not enough disk space. Needed: {} (download: {}, generated: {})\".format(\r\n                        utils.size_str(self.info.size_in_bytes or 0),\r\n                        utils.size_str(self.info.download_size or 0),\r\n>                       utils.size_str(self.info.dataset_size or 0),\r\n                    )\r\n                )\r\nE               OSError: Not enough disk space. 
Needed: 5.62 GiB (download: 1.10 GiB, generated: 4.52 GiB)\r\n```\r\nI added `processed_temp_dir=\"some\/dir\"; raw_temp_dir=\"another\/dir\"` at line 71, and the test passed\r\nhttps:\/\/github.com\/huggingface\/nlp\/blob\/a67a6c422dece904b65d18af65f0e024e839dbe8\/tests\/test_dataset_common.py#L70-L72\r\n\r\nI suggest we create the tmp dir under `\/home\/user\/tmp` rather than `\/tmp`, because, taking our lab server for example, everyone uses `\/tmp`, so it does not have much capacity. Or at least we could improve the error message, so the user knows which directory has no space and how much is left. Or we could do both.\r\n\r\n6. Name of datasets\r\nI was surprised by the dataset name `books_corpus`, and didn't know it came from `class BooksCorpus(nlp.GeneratorBasedBuilder)`. I changed it to `Bookscorpus` afterwards. I think this point should also be in the doc.\r\n\r\n7. More thorough doc on how to create `dataset.py`\r\nI believe there will be.\r\n\r\n**Feel free to close this issue** if you think these are solved.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/249\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/249\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/248","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/248\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/248\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/248\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/248","id":633390427,"node_id":"MDExOlB1bGxSZXF1ZXN0NDMwMDQ0MzU0","number":248,"title":"add Toronto BooksCorpus","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-06-07T12:54:56Z","updated_at":"2020-06-12T08:45:03Z","closed_at":"2020-06-12T08:45:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/248","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/248","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/248.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/248.patch","merged_at":"2020-06-12T08:45:02Z"},"body":"1. I knew there is a branch `toronto_books_corpus`\r\n - After I downloaded it, I found it is all non-english, and only have one row. \r\n- It seems that it cites the wrong paper\r\n- according to papar using it, it is called `BooksCorpus` but not `TornotoBooksCorpus`\r\n\r\n2. It use a text mirror in google drive\r\n- `bookscorpus.py` include a function `download_file_from_google_drive` , maybe you will want to put it elsewhere.\r\n- text mirror is found in this [comment on the issue](https:\/\/github.com\/soskek\/bookcorpus\/issues\/24#issuecomment-556024973), and it said to have the same statistics as the one in the paper.\r\n- You may want to download it and put it on your gs in case of it disappears someday.\r\n\r\n3. Copyright ?\r\nThe paper has said\r\n\r\n> **The BookCorpus Dataset.** In order to train our sentence similarity model we collected a corpus of 11,038 books ***from the web***. These are __**free books written by yet unpublished authors**__. We only included books that had more than 20K words in order to filter out perhaps noisier shorter stories. The dataset has books in 16 different genres, e.g., Romance (2,865 books), Fantasy (1,479), Science fiction (786), Teen (430), etc. 
Table 2 highlights the summary statistics of our book corpus.\r\n\r\nand we have changed the form (it is no longer books), so I don't think it should have those problems. Or we can state that it is to be used at your own risk or only for academic use. I know @thomwolf would know more about these things.\r\n\r\nThis should solve #131 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/248\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/248\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/247","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/247\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/247\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/247\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/247","id":632380078,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI5MTMwMzQ2","number":247,"title":"Make all dataset downloads deterministic by applying `sorted` to glob and os.listdir","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-06-06T11:02:10Z","updated_at":"2020-06-08T09:18:16Z","closed_at":"2020-06-08T09:18:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/247","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/247","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/247.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/247.patch","merged_at":"2020-06-08T09:18:14Z"},"body":"This PR makes all datasets loading deterministic by applying `sorted()` to all `glob.glob` and `os.listdir` statements.\r\n\r\nAre there other \"non-deterministic\" functions apart from `glob.glob()` and `os.listdir()` that you can think of @thomwolf @lhoestq @mariamabarham @jplu ?\r\n\r\n**Important** \r\nIt does break backward compatibility for these datasets because\r\n1. When loading the complete dataset the order in which the examples are saved is different now\r\n2. When loading only part of a split, the examples themselves might be different.\r\n\r\n@patrickvonplaten - the nlp \/ longformer notebook has to be updated since the examples might now be different","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/247\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/247\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/246","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/246\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/246\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/246\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/246","id":632380054,"node_id":"MDU6SXNzdWU2MzIzODAwNTQ=","number":246,"title":"What is the best way to cache a dataset? ","user":{"login":"Mistobaan","id":112599,"node_id":"MDQ6VXNlcjExMjU5OQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/112599?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mistobaan","html_url":"https:\/\/github.com\/Mistobaan","followers_url":"https:\/\/api.github.com\/users\/Mistobaan\/followers","following_url":"https:\/\/api.github.com\/users\/Mistobaan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mistobaan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mistobaan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mistobaan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mistobaan\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mistobaan\/repos","events_url":"https:\/\/api.github.com\/users\/Mistobaan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mistobaan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-06T11:02:07Z","updated_at":"2020-07-09T09:15:07Z","closed_at":"2020-07-09T09:15:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"For example if I want to use streamlit with a nlp dataset:\r\n\r\n```\r\n@st.cache\r\ndef load_data():\r\n    return nlp.load_dataset('squad')\r\n```\r\nThis code raises the error \"uncachable object\"\r\n\r\nRight now I just fixed with a constant for my specific case:\r\n```\r\n    @st.cache(hash_funcs={pyarrow.lib.Buffer: lambda b: 0})\r\n```\r\nBut I was curious to know what is the best way in general\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/246\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/246\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/245","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/245\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/245\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/245\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/245","id":631985108,"node_id":"MDU6SXNzdWU2MzE5ODUxMDg=","number":245,"title":"SST-2 test labels are all -1","user":{"login":"jxmorris12","id":13238952,"node_id":"MDQ6VXNlcjEzMjM4OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13238952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jxmorris12","html_url":"https:\/\/github.com\/jxmorris12","followers_url":"https:\/\/api.github.com\/users\/jxmorris12\/followers","following_url":"https:\/\/api.github.com\/users\/jxmorris12\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jxmorris12\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jxmorris12\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jxmorris12\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jxmorris12\/orgs","repos_url":"https:\/\/api.github.com\/users\/jxmorris12\/repos","events_url":"https:\/\/api.github.com\/users\/jxmorris12\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jxmorris12\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-06-05T21:41:42Z","updated_at":"2021-12-08T00:47:32Z","closed_at":"2020-06-06T16:56:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to test a model on the SST-2 task, but all the labels I see in the test set are -1.\r\n```\r\n>>> import nlp\r\n>>> glue = nlp.load_dataset('glue', 'sst2')\r\n>>> glue\r\n{'train': Dataset(schema: {'sentence': 'string', 'label': 'int64', 'idx': 'int32'}, num_rows: 67349), 'validation': Dataset(schema: {'sentence': 'string', 'label': 'int64', 'idx': 'int32'}, num_rows: 872), 'test': Dataset(schema: {'sentence': 'string', 'label': 'int64', 'idx': 'int32'}, num_rows: 1821)}\r\n>>> list(l['label'] for l in glue['test'])\r\n[-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/245\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/245\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/244","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/244\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/244\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/244\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/244","id":631869155,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI4NjgxMTcx","number":244,"title":"Add Allocin\u00e9 Dataset","user":{"login":"TheophileBlard","id":37028092,"node_id":"MDQ6VXNlcjM3MDI4MDky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37028092?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TheophileBlard","html_url":"https:\/\/github.com\/TheophileBlard","followers_url":"https:\/\/api.github.com\/users\/TheophileBlard\/followers","following_url":"https:\/\/api.github.com\/users\/TheophileBlard\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TheophileBlard\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TheophileBlard\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TheophileBlard\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TheophileBlard\/orgs","repos_url":"https:\/\/api.github.com\/users\/TheophileBlard\/repos","events_url":"https:\/\/api.github.com\/users\/TheophileBlard\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TheophileBlard\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-06-05T19:19:26Z","updated_at":"2020-06-11T07:47:26Z","closed_at":"2020-06-11T07:47:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/244","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/244","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/244.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/244.patch","merged_at":"2020-06-11T07:47:26Z"},"body":"This is a french binary sentiment classification dataset, which was used to train this model: https:\/\/huggingface.co\/tblard\/tf-allocine.\r\n\r\nBasically, it's a french \"IMDB\" dataset, with more reviews.\r\n\r\nMore info on [this repo](https:\/\/github.com\/TheophileBlard\/french-sentiment-analysis-with-bert). ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/244\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/244\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/243","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/243\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/243\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/243\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/243","id":631735848,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI4NTY2MTEy","number":243,"title":"Specify utf-8 encoding for GLUE","user":{"login":"patpizio","id":15801338,"node_id":"MDQ6VXNlcjE1ODAxMzM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15801338?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patpizio","html_url":"https:\/\/github.com\/patpizio","followers_url":"https:\/\/api.github.com\/users\/patpizio\/followers","following_url":"https:\/\/api.github.com\/users\/patpizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patpizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patpizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patpizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patpizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/patpizio\/repos","events_url":"https:\/\/api.github.com\/users\/patpizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patpizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-05T16:33:00Z","updated_at":"2020-06-17T21:16:06Z","closed_at":"2020-06-08T08:42:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/243","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/243","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/243.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/243.patch","merged_at":"2020-06-08T08:42:01Z"},"body":"#242 \r\nThis makes the GLUE-MNLI dataset readable on my machine, not sure if it's a Windows-only bug.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/243\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/243\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/242","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/242\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/242\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/242\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/242","id":631733683,"node_id":"MDU6SXNzdWU2MzE3MzM2ODM=","number":242,"title":"UnicodeDecodeError when downloading GLUE-MNLI","user":{"login":"patpizio","id":15801338,"node_id":"MDQ6VXNlcjE1ODAxMzM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15801338?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patpizio","html_url":"https:\/\/github.com\/patpizio","followers_url":"https:\/\/api.github.com\/users\/patpizio\/followers","following_url":"https:\/\/api.github.com\/users\/patpizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patpizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patpizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patpizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patpizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/patpizio\/repos","events_url":"https:\/\/api.github.com\/users\/patpizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patpizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-05T16:30:01Z","updated_at":"2020-06-09T16:06:47Z","closed_at":"2020-06-08T08:45:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I run\r\n```python\r\ndataset = nlp.load_dataset('glue', 'mnli')\r\n```\r\nI get an encoding error (could it be because I'm using Windows?) :\r\n```python\r\n# Lots of error log lines later...\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\tqdm\\std.py in __iter__(self)\r\n   1128         try:\r\n-> 1129             for obj in iterable:\r\n   1130                 yield obj\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\datasets\\glue\\5256cc2368cf84497abef1f1a5f66648522d5854b225162148cb8fc78a5a91cc\\glue.py in _generate_examples(self, data_file, split, mrpc_files)\r\n    529 \r\n--> 530                 for n, row in enumerate(reader):\r\n    531                     if is_cola_non_test:\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\csv.py in __next__(self)\r\n    110             self.fieldnames\r\n--> 111         row = next(self.reader)\r\n    112         self.line_num = self.reader.line_num\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\encodings\\cp1252.py in decode(self, input, final)\r\n     22     def decode(self, input, final=False):\r\n---> 23         return codecs.charmap_decode(input,self.errors,decoding_table)[0]\r\n     24 \r\n\r\nUnicodeDecodeError: 'charmap' codec can't decode byte 0x9d in position 6744: character maps to \r\n```\r\nAnyway this can be solved by specifying to decode in UTF when reading the csv file. 
I am proposing a PR if that's okay.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/242\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/242\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/241","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/241\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/241\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/241\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/241","id":631703079,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI4NTQwMDM0","number":241,"title":"Fix empty cache dir","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-06-05T15:45:22Z","updated_at":"2020-06-08T08:35:33Z","closed_at":"2020-06-08T08:35:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/241","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/241","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/241.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/241.patch","merged_at":"2020-06-08T08:35:31Z"},"body":"If the cache dir of a dataset is empty, the dataset fails to load and throws a FileNotFounfError. We could end up with empty cache dir because there was a line in the code that created the cache dir without using a temp dir. Using a temp dir is useful as it gets renamed to the real cache dir only if the full process is successful.\r\n\r\nSo I removed this bad line, and I also reordered things a bit to make sure that we always use a temp dir. I also added warning if we still end up with empty cache dirs in the future.\r\n\r\nThis should fix #239\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/241\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/241\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/240","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/240\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/240\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/240\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/240","id":631434677,"node_id":"MDU6SXNzdWU2MzE0MzQ2Nzc=","number":240,"title":"Deterministic dataset loading","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-06-05T09:03:26Z","updated_at":"2020-06-08T09:18:14Z","closed_at":"2020-06-08T09:18:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When calling:\r\n```python \r\nimport nlp\r\ndataset = nlp.load_dataset(\"trivia_qa\", split=\"validation[:1%]\")\r\n```\r\n\r\nthe resulting dataset is not deterministic over different google colabs. \r\nAfter talking to @thomwolf, I suspect the reason to be the use of `glob.glob` in line:\r\n\r\nhttps:\/\/github.com\/huggingface\/nlp\/blob\/2e0a8639a79b1abc848cff5c669094d40bba0f63\/datasets\/trivia_qa\/trivia_qa.py#L180\r\n\r\nwhich seems to return an ordering of files that depends on the filesystem:\r\nhttps:\/\/stackoverflow.com\/questions\/6773584\/how-is-pythons-glob-glob-ordered\r\n\r\nI think we should go through all the dataset scripts and make sure to have deterministic behavior.\r\n\r\nA simple solution for `glob.glob()` would be to just replace it with `sorted(glob.glob())` to have everything sorted by name. \r\n\r\nWhat do you think @lhoestq?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/240\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/240\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/239","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/239\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/239\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/239\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/239","id":631340440,"node_id":"MDU6SXNzdWU2MzEzNDA0NDA=","number":239,"title":"[Creating new dataset] Not found dataset_info.json","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/
api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2020-06-05T06:15:04Z","updated_at":"2020-06-07T13:01:04Z","closed_at":"2020-06-07T13:01:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I am trying to create Toronto Book Corpus. #131 \r\n\r\nI ran\r\n`~\/nlp % python nlp-cli test datasets\/bookcorpus --save_infos --all_configs`\r\nbut this doesn't create `dataset_info.json` and try to use it\r\n```\r\nINFO:nlp.load:Checking datasets\/bookcorpus\/bookcorpus.py for additional imports.\r\nINFO:filelock:Lock 139795325778640 acquired on datasets\/bookcorpus\/bookcorpus.py.lock\r\nINFO:nlp.load:Found main folder for dataset datasets\/bookcorpus\/bookcorpus.py at \/home\/yisiang\/miniconda3\/envs\/ml\/lib\/python3.7\/site-packages\/nlp\/datasets\/bookcorpus\r\nINFO:nlp.load:Found specific version folder for dataset datasets\/bookcorpus\/bookcorpus.py at \/home\/yisiang\/miniconda3\/envs\/ml\/lib\/python3.7\/site-packages\/nlp\/datasets\/bookcorpus\/8e84759446cf68d0b0deb3417e60cc331f30a3bbe58843de18a0f48e87d1efd9\r\nINFO:nlp.load:Found script file from datasets\/bookcorpus\/bookcorpus.py to \/home\/yisiang\/miniconda3\/envs\/ml\/lib\/python3.7\/site-packages\/nlp\/datasets\/bookcorpus\/8e84759446cf68d0b0deb3417e60cc331f30a3bbe58843de18a0f48e87d1efd9\/bookcorpus.py\r\nINFO:nlp.load:Couldn't find dataset infos file at datasets\/bookcorpus\/dataset_infos.json\r\nINFO:nlp.load:Found metadata file for dataset datasets\/bookcorpus\/bookcorpus.py at \/home\/yisiang\/miniconda3\/envs\/ml\/lib\/python3.7\/site-packages\/nlp\/datasets\/bookcorpus\/8e84759446cf68d0b0deb3417e60cc331f30a3bbe58843de18a0f48e87d1efd9\/bookcorpus.json\r\nINFO:filelock:Lock 139795325778640 released on datasets\/bookcorpus\/bookcorpus.py.lock\r\nINFO:nlp.builder:Overwrite dataset info from restored data version.\r\nINFO:nlp.info:Loading Dataset info from \/home\/yisiang\/.cache\/huggingface\/datasets\/book_corpus\/plain_text\/1.0.0\r\nTraceback (most recent call last):\r\n  File \"nlp-cli\", line 37, in \r\n    service.run()\r\n  File \"\/home\/yisiang\/miniconda3\/envs\/ml\/lib\/python3.7\/site-packages\/nlp\/commands\/test.py\", line 78, in run\r\n    builders.append(builder_cls(name=config.name, data_dir=self._data_dir))\r\n  File \"\/home\/yisiang\/miniconda3\/envs\/ml\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 610, in __init__\r\n    super(GeneratorBasedBuilder, self).__init__(*args, **kwargs)\r\n  File \"\/home\/yisiang\/miniconda3\/envs\/ml\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 152, in __init__\r\n    self.info = DatasetInfo.from_directory(self._cache_dir)\r\n  File \"\/home\/yisiang\/miniconda3\/envs\/ml\/lib\/python3.7\/site-packages\/nlp\/info.py\", line 157, in from_directory\r\n    with open(os.path.join(dataset_info_dir, DATASET_INFO_FILENAME), \"r\") as f:\r\nFileNotFoundError: [Errno 2] No such file or directory: '\/home\/yisiang\/.cache\/huggingface\/datasets\/book_corpus\/plain_text\/1.0.0\/dataset_info.json'\r\n```\r\nbtw, `ls \/home\/yisiang\/.cache\/huggingface\/datasets\/book_corpus\/plain_text\/1.0.0\/` show me nothing is in the directory.\r\n\r\nI have also pushed the script to my fork 
[bookcorpus.py](https:\/\/github.com\/richardyy1188\/nlp\/blob\/bookcorpusdev\/datasets\/bookcorpus\/bookcorpus.py).\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/239\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/239\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/238","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/238\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/238\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/238\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/238","id":631260143,"node_id":"MDU6SXNzdWU2MzEyNjAxNDM=","number":238,"title":"[Metric] Bertscore : Warning : Empty candidate sentence; Setting recall to be 0.","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[{"id":2067393914,"node_id":"MDU6TGFiZWwyMDY3MzkzOTE0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20bug","name":"metric bug","color":"25b21e","default":false,"description":"A bug in a metric script"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-05T02:14:47Z","updated_at":"2020-06-29T17:10:19Z","closed_at":"2020-06-29T17:10:19Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When running BERT-Score, I'm meeting this warning :\r\n\r\n> Warning: Empty candidate sentence; Setting recall to be 0.\r\n\r\nCode :\r\n\r\n```\r\nimport nlp\r\nmetric = nlp.load_metric(\"bertscore\")\r\nscores = metric.compute([\"swag\", \"swags\"], [\"swags\", \"totally something different\"], lang=\"en\", device=0)\r\n```\r\n\r\n---\r\n\r\n**What am I doing wrong \/ How can I hide this warning ?**","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/238\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/238\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/237","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/237\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/237\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/237\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/237","id":631199940,"node_id":"MDU6SXNzdWU2MzExOTk5NDA=","number":237,"title":"Can't download MultiNLI","user":{"login":"patpizio","id":15801338,"node_id":"MDQ6VXNlcjE1ODAxMzM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15801338?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patpizio","html_url":"https:\/\/github.com\/patpizio","followers_url":"https:\/\/api.github.com\/users\/patpizio\/followers","following_url":"https:\/\/api.github.com\/users\/patpizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patpizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patpizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patpizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patpizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/patpizio\/repos","events_url":"https:\/\/api.github.com\/users\/patpizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patpizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-06-04T23:05:21Z","updated_at":"2020-06-06T10:51:34Z","closed_at":"2020-06-06T10:51:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I try to download MultiNLI with \r\n```python\r\ndataset = load_dataset('multi_nli')\r\n```\r\n\r\nI get this long error:\r\n```python\r\n---------------------------------------------------------------------------\r\nOSError                                   Traceback (most recent call last)\r\n in \r\n      1 # Load a dataset and print the first examples in the training set\r\n      2 # nli_dataset = nlp.load_dataset('multi_nli')\r\n----> 3 dataset = load_dataset('multi_nli')\r\n      4 # nli_dataset = nlp.load_dataset('multi_nli', split='validation_matched[:10%]')\r\n      5 # print(nli_dataset['train'][0])\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n    514 \r\n    515     # Download and prepare data\r\n--> 516     builder_instance.download_and_prepare(\r\n    517         download_config=download_config,\r\n    518         download_mode=download_mode,\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, try_from_hf_gcs, dl_manager, **download_and_prepare_kwargs)\r\n    417             with utils.temporary_assignment(self, \"_cache_dir\", tmp_data_dir):\r\n    418                 verify_infos = not save_infos and not ignore_verifications\r\n--> 419                 self._download_and_prepare(\r\n    420                     dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n    421                 
)\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n    455         split_dict = SplitDict(dataset_name=self.name)\r\n    456         split_generators_kwargs = self._make_split_generators_kwargs(prepare_split_kwargs)\r\n--> 457         split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n    458         # Checksums verification\r\n    459         if verify_infos:\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\datasets\\multi_nli\\60774175381b9f3f1e6ae1028229e3cdb270d50379f45b9f2c01008f50f09e6b\\multi_nli.py in _split_generators(self, dl_manager)\r\n     99     def _split_generators(self, dl_manager):\r\n    100 \r\n--> 101         downloaded_dir = dl_manager.download_and_extract(\r\n    102             \"http:\/\/storage.googleapis.com\/tfds-data\/downloads\/multi_nli\/multinli_1.0.zip\"\r\n    103         )\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\utils\\download_manager.py in download_and_extract(self, url_or_urls)\r\n    214             extracted_path(s): `str`, extracted paths of given URL(s).\r\n    215         \"\"\"\r\n--> 216         return self.extract(self.download(url_or_urls))\r\n    217 \r\n    218     def get_recorded_sizes_checksums(self):\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\utils\\download_manager.py in extract(self, path_or_paths)\r\n    194                 path_or_paths.\r\n    195         \"\"\"\r\n--> 196         return map_nested(\r\n    197             lambda path: cached_path(path, extract_compressed_file=True, force_extract=False), path_or_paths,\r\n    198         )\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\utils\\py_utils.py in map_nested(function, data_struct, dict_only, map_tuple)\r\n    168                 return tuple(mapped)\r\n    169     # Singleton\r\n--> 170     return function(data_struct)\r\n    171 \r\n    172 \r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\utils\\download_manager.py in (path)\r\n    195         \"\"\"\r\n    196         return map_nested(\r\n--> 197             lambda path: cached_path(path, extract_compressed_file=True, force_extract=False), path_or_paths,\r\n    198         )\r\n    199 \r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\site-packages\\nlp\\utils\\file_utils.py in cached_path(url_or_filename, download_config, **download_kwargs)\r\n    231             if is_zipfile(output_path):\r\n    232                 with ZipFile(output_path, \"r\") as zip_file:\r\n--> 233                     zip_file.extractall(output_path_extracted)\r\n    234                     zip_file.close()\r\n    235             elif tarfile.is_tarfile(output_path):\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\zipfile.py in extractall(self, path, members, pwd)\r\n   1644 \r\n   1645         for zipinfo in members:\r\n-> 1646             self._extract_member(zipinfo, path, pwd)\r\n   1647 \r\n   1648     @classmethod\r\n\r\n~\\Miniconda3\\envs\\nlp\\lib\\zipfile.py in _extract_member(self, member, targetpath, pwd)\r\n   1698 \r\n   1699         with self.open(member, pwd=pwd) as source, \\\r\n-> 1700              open(targetpath, \"wb\") as target:\r\n   1701             shutil.copyfileobj(source, target)\r\n   1702 \r\n\r\nOSError: [Errno 22] Invalid argument: 
'C:\\\\Users\\\\Python\\\\.cache\\\\huggingface\\\\datasets\\\\3e12413b8ec69f22dfcfd54a79d1ba9e7aac2e18e334bbb6b81cca64fd16bffc\\\\multinli_1.0\\\\Icon\\r'\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/237\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/237\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/236","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/236\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/236\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/236\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/236","id":631099875,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI4MDUwNzI4","number":236,"title":"CompGuessWhat?! dataset ","user":{"login":"aleSuglia","id":1479733,"node_id":"MDQ6VXNlcjE0Nzk3MzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1479733?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aleSuglia","html_url":"https:\/\/github.com\/aleSuglia","followers_url":"https:\/\/api.github.com\/users\/aleSuglia\/followers","following_url":"https:\/\/api.github.com\/users\/aleSuglia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aleSuglia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aleSuglia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aleSuglia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aleSuglia\/orgs","repos_url":"https:\/\/api.github.com\/users\/aleSuglia\/repos","events_url":"https:\/\/api.github.com\/users\/aleSuglia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aleSuglia\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2020-06-04T19:45:50Z","updated_at":"2020-06-11T09:43:42Z","closed_at":"2020-06-11T07:45:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/236","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/236","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/236.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/236.patch","merged_at":"2020-06-11T07:45:21Z"},"body":"Hello,\r\n\r\nThanks for the amazing library that you put together. I'm Alessandro Suglia, the first author of CompGuessWhat?!, a recently released dataset for grounded language learning accepted to ACL 2020 ([https:\/\/compguesswhat.github.io](https:\/\/compguesswhat.github.io)).\r\n\r\nThis pull-request adds the CompGuessWhat?! splits that have been extracted from the original dataset. This is only part of our evaluation framework because there is also an additional split of the dataset that has a completely different set of games. I didn't integrate it yet because I didn't know what would be the best practice in this case. Let me clarify the scenario.\r\n\r\nIn our paper, we have a main dataset (let's call it `compguesswhat-gameplay`) and a zero-shot dataset (let's call it `compguesswhat-zs-gameplay`). In the current code of the pull-request, I have only integrated `compguesswhat-gameplay`. I was thinking that it would be nice to have the `compguesswhat-zs-gameplay` in the same dataset class by simply specifying some particular option to the `nlp.load_dataset()` factory. For instance:\r\n\r\n```python\r\n\r\ncgw = nlp.load_dataset(\"compguesswhat\")\r\ncgw_zs = nlp.load_dataset(\"compguesswhat\", zero_shot=True)\r\n```\r\n\r\nThe other option would be to have a separate dataset class. 
Any preferences?  ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/236\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/236\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/235","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/235\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/235\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/235\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/235","id":630952297,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI3OTM1MjQ0","number":235,"title":"Add experimental datasets","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-06-04T15:54:56Z","updated_at":"2020-06-12T15:38:55Z","closed_at":"2020-06-12T15:38:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/235","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/235","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/235.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/235.patch","merged_at":"2020-06-12T15:38:55Z"},"body":"## Adding an *experimental datasets* folder\r\n\r\nAfter using the \ud83e\udd17nlp library for some time, I find that while it makes it super easy to create new memory-mapped datasets with lots of cool utilities, a lot of what I want to do doesn't work well with the current `MockDownloader` based testing paradigm, making it hard to share my work with the community.\r\n\r\nMy suggestion would be to add a **datasets\\_experimental** folder so we can start making these new datasets public without having to completely re-think testing for every single one. We would allow contributors to submit dataset PRs in this folder, but require an explanation for why the current testing suite doesn't work for them. We can then aggregate the feedback and periodically see what's missing from the current tests.\r\n\r\nI have added a **datasets\\_experimental** folder to the repository and S3 bucket with two initial datasets: ELI5 (explainlikeimfive) and a Wikipedia Snippets dataset to support indexing (wiki\\_snippets)\r\n\r\n### ELI5\r\n#### Dataset description\r\nThis allows people to download the [ELI5: Long Form Question Answering](https:\/\/arxiv.org\/abs\/1907.09190) dataset, along with two variants based on the r\/askscience and r\/AskHistorians. 
Full Reddit dumps for each month are downloaded from [pushshift](https:\/\/files.pushshift.io\/reddit\/), filtered for submissions and comments from the desired subreddits, then deleted one at a time to save space. The resulting dataset is split into a training, validation, and test dataset for r\/explainlikeimfive, r\/askscience, and r\/AskHistorians respectively, where each item is a question along with all of its high scoring answers.\r\n\r\n#### Issues with the current testing\r\n1. the list of files to be downloaded is not pre-defined, but rather determined by parsing an index web page at run time. This is necessary as the name and compression type of the dump files changes from month to month as the pushshift website is maintained.  Currently, the dummy folder requires the user to know which files will be downloaded.\r\n2. to save time, the script works on the compressed files using the corresponding python packages rather than first running `download\\_and\\_extract` then filtering the extracted files.  \r\n\r\n### Wikipedia Snippets\r\n#### Dataset description\r\nThis script creates a *snippets* version of a source Wikipedia dataset: each article is split into passages of fixed length which can then be indexed using ElasticSearch or a dense indexer. The script currently handles all **wikipedia** and **wiki40b** source datasets, and allows the user to choose the passage length and how much overlap they want across passages. In addition to the passage text, each snippet also has the article title, list of titles of sections covered by the text, and information to map the passage back to the initial dataset at the paragraph and character level.\r\n\r\n#### Issues with the current testing\r\n1. The DatasetBuilder needs to call `nlp.load_dataset()`. Currently, testing is not recursive (the test doesn't know where to find the dummy data for the source dataset)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/235\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/235\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/234","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/234\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/234\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/234\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/234","id":630534427,"node_id":"MDU6SXNzdWU2MzA1MzQ0Mjc=","number":234,"title":"Huggingface NLP, Uploading custom dataset","user":{"login":"Nouman97","id":42269506,"node_id":"MDQ6VXNlcjQyMjY5NTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42269506?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Nouman97","html_url":"https:\/\/github.com\/Nouman97","followers_url":"https:\/\/api.github.com\/users\/Nouman97\/followers","following_url":"https:\/\/api.github.com\/users\/Nouman97\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Nouman97\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Nouman97\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Nouman97\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Nouman97\/orgs","repos_url":"https:\/\/api.github.com\/users\/Nouman97\/repos","events_url":"https:\/\/api.github.com\/users\/Nouman97\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Nouman97\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-06-04T05:59:06Z","updated_at":"2020-07-06T09:33:26Z","closed_at":"2020-07-06T09:33:26Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello,\r\n\r\nDoes anyone know how we can call our custom dataset using the nlp.load command? Let's say that I have a dataset based on the same format as that of squad-v1.1, how am I supposed to load it using huggingface nlp.\r\n\r\nThank you!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/234\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/234\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/233","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/233\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/233\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/233\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/233","id":630432132,"node_id":"MDU6SXNzdWU2MzA0MzIxMzI=","number":233,"title":"Fail to download c4 english corpus","user":{"login":"donggyukimc","id":16605764,"node_id":"MDQ6VXNlcjE2NjA1NzY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16605764?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/donggyukimc","html_url":"https:\/\/github.com\/donggyukimc","followers_url":"https:\/\/api.github.com\/users\/donggyukimc\/followers","following_url":"https:\/\/api.github.com\/users\/donggyukimc\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/donggyukimc\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/donggyukimc\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/donggyukimc\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/donggyukimc\/orgs","repos_url":"https:\/\/api.github.com\/users\/donggyukimc\/repos","events_url":"https:\/\/api.github.com\/users\/donggyukimc\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/donggyukimc\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-06-04T01:06:38Z","updated_at":"2021-01-08T07:17:32Z","closed_at":"2020-06-08T09:16:59Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"i run following code to download c4 English corpus.\r\n\r\n```\r\ndataset = nlp.load_dataset('c4', 'en', beam_runner='DirectRunner'\r\n, data_dir='\/mypath')\r\n```\r\n\r\nand i met failure as follows\r\n\r\n```\r\nDownloading and preparing dataset c4\/en (download: Unknown size, generated: Unknown size, total: Unknown size) to \/home\/adam\/.cache\/huggingface\/datasets\/c4\/en\/2.3.0...\r\nTraceback (most recent call last):\r\n  File \"download_corpus.py\", line 38, in \r\n    , data_dir='\/home\/adam\/data\/corpus\/en\/c4')\r\n  File \"\/home\/adam\/anaconda3\/envs\/adam\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 520, in load_dataset\r\n    save_infos=save_infos,\r\n  File \"\/home\/adam\/anaconda3\/envs\/adam\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 420, in download_and_prepare\r\n    dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n  File \"\/home\/adam\/anaconda3\/envs\/adam\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 816, in _download_and_prepare\r\n    dl_manager, verify_infos=False, pipeline=pipeline,\r\n  File \"\/home\/adam\/anaconda3\/envs\/adam\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 457, in _download_and_prepare\r\n    split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n  File \"\/home\/adam\/anaconda3\/envs\/adam\/lib\/python3.7\/site-packages\/nlp\/datasets\/c4\/f545de9f63300d8d02a6795e2eb34e140c47e62a803f572ac5599e170ee66ecc\/c4.py\", line 175, in _split_generators\r\n    dl_manager.download_checksums(_CHECKSUMS_URL)\r\nAttributeError: 'DownloadManager' 
object has no attribute 'download_checksums'\r\n\r\n```\r\nCan I get any advice?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/233\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/233\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/232","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/232\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/232\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/232\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/232","id":630029568,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI3MjI5NDcy","number":232,"title":"Nlp cli fix endpoints","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-03T14:10:39Z","updated_at":"2020-06-08T09:02:58Z","closed_at":"2020-06-08T09:02:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/232","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/232","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/232.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/232.patch","merged_at":"2020-06-08T09:02:57Z"},"body":"With this PR users will be able to upload their own datasets and metrics.\r\n\r\nAs mentioned in #181, I had to use the new endpoints and revert the use of dataclasses (just in case we have changes in the API in the future).\r\n\r\nWe now distinguish commands for datasets and commands for metrics:\r\n```bash\r\nnlp-cli upload_dataset \r\nnlp-cli upload_metric \r\nnlp-cli s3_datasets {rm, ls}\r\nnlp-cli s3_metrics {rm, ls}\r\n```\r\n\r\nDoes it sound good to you @julien-c @thomwolf ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/232\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/232\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/231","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/231\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/231\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/231\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/231","id":629988694,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI3MTk3MTcz","number":231,"title":"Add .download to MockDownloadManager","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-03T13:20:00Z","updated_at":"2020-06-03T14:25:56Z","closed_at":"2020-06-03T14:25:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/231","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/231","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/231.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/231.patch","merged_at":"2020-06-03T14:25:54Z"},"body":"One method from the DownloadManager was missing and some users couldn't run the tests because of that.\r\n@yjernite ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/231\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/231\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/230","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/230\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/230\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/230\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/230","id":629983684,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI3MTkzMTQ0","number":230,"title":"Don't force to install apache beam for wikipedia dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-06-03T13:13:07Z","updated_at":"2020-06-03T14:34:09Z","closed_at":"2020-06-03T14:34:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/230","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/230","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/230.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/230.patch","merged_at":"2020-06-03T14:34:07Z"},"body":"As pointed out in #227, we shouldn't force users to install apache beam if the processed dataset can be downloaded. I moved the imports of some datasets to avoid this problem","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/230\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/230\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/229","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/229\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/229\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/229\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/229","id":629956490,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI3MTcxMzc5","number":229,"title":"Rename dataset_infos.json to dataset_info.json","user":{"login":"aswin-giridhar","id":11817160,"node_id":"MDQ6VXNlcjExODE3MTYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11817160?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aswin-giridhar","html_url":"https:\/\/github.com\/aswin-giridhar","followers_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/followers","following_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/orgs","repos_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/repos","events_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-03T12:31:44Z","updated_at":"2020-06-03T12:52:54Z","closed_at":"2020-06-03T12:48:33Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/229","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/229","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/229.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/229.patch","merged_at":null},"body":"As the file required for the viewing in the live nlp viewer is named as  dataset_info.json","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/229\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/229\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/228","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/228\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/228\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/228\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/228","id":629952402,"node_id":"MDU6SXNzdWU2Mjk5NTI0MDI=","number":228,"title":"Not able to access the XNLI dataset","user":{"login":"aswin-giridhar","id":11817160,"node_id":"MDQ6VXNlcjExODE3MTYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11817160?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aswin-giridhar","html_url":"https:\/\/github.com\/aswin-giridhar","followers_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/followers","following_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/orgs","repos_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/repos","events_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aswin-giridhar\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"closed","locked":false,"assignee":{"login":"srush","id":35882,"node_id":"MDQ6VXNlcjM1ODgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35882?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/srush","html_url":"https:\/\/github.com\/srush","followers_url":"https:\/\/api.github.com\/users\/srush\/followers","following_url":"https:\/\/api.github.com\/users\/srush\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/srush\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/srush\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/srush\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/srush\/orgs","repos_url":"https:\/\/api.github.com\/users\/srush\/repos","events_url":"https:\/\/api.github.com\/users\/srush\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/srush\/received_events","type":"User","site_admin":false},"assignees":[{"login":"srush","id":35882,"node_id":"MDQ6VXNlcjM1ODgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35882?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/srush","html_url":"https:\/\/github.com\/srush","followers_url":"https:\/\/api.github.com\/users\/srush\/followers","following_url":"https:\/\/api.github.com\/users\/srush\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/srush\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/srush\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/srush\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/srush\/orgs","repos_url":"htt
ps:\/\/api.github.com\/users\/srush\/repos","events_url":"https:\/\/api.github.com\/users\/srush\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/srush\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2020-06-03T12:25:14Z","updated_at":"2020-07-17T17:44:22Z","closed_at":"2020-07-17T17:44:22Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I try to access the XNLI dataset, I get the following error. The option of plain_text get selected automatically and then I get the following error.\r\n\r\n```\r\nFileNotFoundError: [Errno 2] No such file or directory: '\/home\/sasha\/.cache\/huggingface\/datasets\/xnli\/plain_text\/1.0.0\/dataset_info.json'\r\nTraceback:\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/streamlit\/ScriptRunner.py\", line 322, in _run_script\r\n    exec(code, module.__dict__)\r\nFile \"\/home\/sasha\/nlp_viewer\/run.py\", line 86, in \r\n    dts, fail = get(str(option.id), str(conf_option.name) if conf_option else None)\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/streamlit\/caching.py\", line 591, in wrapped_func\r\n    return get_or_create_cached_value()\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/streamlit\/caching.py\", line 575, in get_or_create_cached_value\r\n    return_value = func(*args, **kwargs)\r\nFile \"\/home\/sasha\/nlp_viewer\/run.py\", line 72, in get\r\n    builder_instance = builder_cls(name=conf)\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 610, in __init__\r\n    super(GeneratorBasedBuilder, self).__init__(*args, **kwargs)\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/nlp\/builder.py\", line 152, in __init__\r\n    self.info = DatasetInfo.from_directory(self._cache_dir)\r\nFile \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/nlp\/info.py\", line 157, in from_directory\r\n    with open(os.path.join(dataset_info_dir, DATASET_INFO_FILENAME), \"r\") as f:\r\n```\r\n\r\nIs it possible to see if the dataset_info.json is correctly placed?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/228\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/228\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/227","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/227\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/227\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/227\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/227","id":629845704,"node_id":"MDU6SXNzdWU2Mjk4NDU3MDQ=","number":227,"title":"Should we still have to force to install apache_beam to download wikipedia ?","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","receiv
ed_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2020-06-03T09:33:20Z","updated_at":"2020-06-03T15:25:41Z","closed_at":"2020-06-03T15:25:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, first thanks to @lhoestq 's revolutionary work, I successfully downloaded processed wikipedia according to the doc. \ud83d\ude0d\ud83d\ude0d\ud83d\ude0d\r\n\r\nBut at the first try, it tell me to install `apache_beam` and `mwparserfromhell`, which I thought wouldn't be used according to #204 , it was kind of confusing me at that time.\r\n\r\nMaybe we should not force users to install these ? Or we just add them to`nlp`'s dependency ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/227\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/227\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/226","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/226\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/226\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/226\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/226","id":628344520,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI1OTA0MjEz","number":226,"title":"add BlendedSkillTalk dataset","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-06-01T10:54:45Z","updated_at":"2020-06-03T14:37:23Z","closed_at":"2020-06-03T14:37:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/226","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/226","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/226.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/226.patch","merged_at":"2020-06-03T14:37:22Z"},"body":"This PR add the BlendedSkillTalk dataset, which is used to fine tune the blenderbot.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/226\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/226\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/225","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/225\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/225\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/225\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/225","id":628083366,"node_id":"MDU6SXNzdWU2MjgwODMzNjY=","number":225,"title":"[ROUGE] Different scores with `files2rouge`","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[{"id":2067400959,"node_id":"MDU6TGFiZWwyMDY3NDAwOTU5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Metric%20discussion","name":"Metric discussion","color":"d722e8","default":false,"description":"Discussions on the 
metrics"}],"state":"closed","locked":false,"assignee":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"assignees":[{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2020-06-01T00:50:36Z","updated_at":"2020-06-03T15:27:18Z","closed_at":"2020-06-03T15:27:18Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It seems that the ROUGE score of `nlp` is lower than the one of `files2rouge`.\r\n\r\nHere is a self-contained notebook to reproduce both scores : https:\/\/colab.research.google.com\/drive\/14EyAXValB6UzKY9x4rs_T3pyL7alpw_F?usp=sharing\r\n\r\n---\r\n\r\n`nlp` : (Only mid F-scores)\r\n\r\n>rouge1 0.33508031962733364\r\nrouge2 0.14574333776191592\r\nrougeL 0.2321187823256159\r\n\r\n`files2rouge` :\r\n\r\n>Running ROUGE...\r\n===========================\r\n1 ROUGE-1 Average_R: 0.48873 (95%-conf.int. 0.41192 - 0.56339)\r\n1 ROUGE-1 Average_P: 0.29010 (95%-conf.int. 0.23605 - 0.34445)\r\n1 ROUGE-1 Average_F: 0.34761 (95%-conf.int. 0.29479 - 0.39871)\r\n===========================\r\n1 ROUGE-2 Average_R: 0.20280 (95%-conf.int. 0.14969 - 0.26244)\r\n1 ROUGE-2 Average_P: 0.12772 (95%-conf.int. 0.08603 - 0.17752)\r\n1 ROUGE-2 Average_F: 0.14798 (95%-conf.int. 0.10517 - 0.19240)\r\n===========================\r\n1 ROUGE-L Average_R: 0.32960 (95%-conf.int. 0.26501 - 0.39676)\r\n1 ROUGE-L Average_P: 0.19880 (95%-conf.int. 0.15257 - 0.25136)\r\n1 ROUGE-L Average_F: 0.23619 (95%-conf.int. 0.19073 - 0.28663)\r\n\r\n---\r\n\r\nWhen using longer predictions\/gold, the difference is bigger.  
\r\n**How can I reproduce the same score as `files2rouge`?**\r\n\r\n@lhoestq \r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/225\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/225\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/224","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/224\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/224\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/224\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/224","id":627791693,"node_id":"MDU6SXNzdWU2Mjc3OTE2OTM=","number":224,"title":"[Feature Request\/Help] BLEURT model -> PyTorch","user":{"login":"adamwlev","id":6889910,"node_id":"MDQ6VXNlcjY4ODk5MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6889910?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adamwlev","html_url":"https:\/\/github.com\/adamwlev","followers_url":"https:\/\/api.github.com\/users\/adamwlev\/followers","following_url":"https:\/\/api.github.com\/users\/adamwlev\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adamwlev\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adamwlev\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adamwlev\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adamwlev\/orgs","repos_url":"https:\/\/api.github.com\/users\/adamwlev\/repos","events_url":"https:\/\/api.github.com\/users\/adamwlev\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adamwlev\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"assignees":[{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2020-05-30T18:30:40Z","updated_at":"2021-09-02T15:02:17Z","closed_at":"2021-01-04T09:53:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I am interested in porting google research's new BLEURT learned metric to PyTorch (because I wish to do something experimental with language generation and backpropping through BLEURT). I noticed that you guys don't have it yet so I am partly just asking if you plan to add it (@thomwolf said you want to do so on Twitter).\r\n\r\nI had a go of just like manually using the checkpoint that they publish which includes the weights. It seems like the architecture is exactly aligned with the out-of-the-box BertModel in transformers just with a single linear layer on top of the CLS embedding. I loaded all the weights to the PyTorch model but I am not able to get the same numbers as the BLEURT package's python api. Here is my colab notebook where I tried  https:\/\/colab.research.google.com\/drive\/1Bfced531EvQP_CpFvxwxNl25Pj6ptylY?usp=sharing . If you have any pointers on what might be going wrong that would be much appreciated!\r\n\r\nThank you muchly!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/224\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/224\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/223","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/223\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/223\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/223\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/223","id":627683386,"node_id":"MDU6SXNzdWU2Mjc2ODMzODY=","number":223,"title":"[Feature request] Add FLUE dataset ","user":{"login":"lbourdois","id":58078086,"node_id":"MDQ6VXNlcjU4MDc4MDg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/58078086?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lbourdois","html_url":"https:\/\/github.com\/lbourdois","followers_url":"https:\/\/api.github.com\/users\/lbourdois\/followers","following_url":"https:\/\/api.github.com\/users\/lbourdois\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lbourdois\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lbourdois\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lbourdois\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lbourdois\/orgs","repos_url":"https:\/\/api.github.com\/users\/lbourdois\/repos","events_url":"https:\/\/api.github.com\/users\/lbourdois\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lbourdois\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-30T08:52:15Z","updated_at":"2020-12-03T13:39:33Z","closed_at":"2020-12-03T13:39:33Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI think it would be interesting to add the FLUE dataset for francophones or anyone wishing to work on French.\r\n\r\nIn other requests, I read that you are already working on some datasets, and I was wondering if FLUE was planned.\r\n\r\nIf it is not the case, I can provide each of the cleaned FLUE datasets (in the form of a directly exploitable dataset rather than in the original xml formats which require additional processing, with the French part for cases where the dataset is based on a multilingual dataframe, etc.).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/223\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/223\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/222","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/222\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/222\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/222\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/222","id":627586690,"node_id":"MDU6SXNzdWU2Mjc1ODY2OTA=","number":222,"title":"Colab Notebook breaks when downloading the squad dataset","user":{"login":"carlos-aguayo","id":338917,"node_id":"MDQ6VXNlcjMzODkxNw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/338917?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/carlos-aguayo","html_url":"https:\/\/github.com\/carlos-aguayo","followers_url":"https:\/\/api.github.com\/users\/carlos-aguayo\/followers","following_url":"https:\/\/api.github.com\/users\/carlos-aguayo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/carlos-aguayo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/carlos-aguayo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/carlos-aguayo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/carlos-aguayo\/orgs","repos_url":"https:\/\/api.github.com\/users\/carlos-aguayo\/repos","events_url":"https:\/\/api.github.com\/users\/carlos-aguayo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/carlos-aguayo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-05-29T22:55:59Z","updated_at":"2020-06-04T00:21:05Z","closed_at":"2020-06-04T00:21:05Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When I run the notebook in Colab\r\nhttps:\/\/colab.research.google.com\/github\/huggingface\/nlp\/blob\/master\/notebooks\/Overview.ipynb\r\nbreaks when running this cell:\r\n![image](https:\/\/user-images.githubusercontent.com\/338917\/83311709-ffd1b800-a1dd-11ea-8394-3a87df0d7f8b.png)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/222\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/222\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/221","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/221\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/221\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/221\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/221","id":627300648,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI1MTI5OTc0","number":221,"title":"Fix tests\/test_dataset_common.py","user":{"login":"tayciryahmed","id":13635495,"node_id":"MDQ6VXNlcjEzNjM1NDk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13635495?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tayciryahmed","html_url":"https:\/\/github.com\/tayciryahmed","followers_url":"https:\/\/api.github.com\/users\/tayciryahmed\/followers","following_url":"https:\/\/api.github.com\/users\/tayciryahmed\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tayciryahmed\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tayciryahmed\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tayciryahmed\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tayciryahmed\/orgs","repos_url":"https:\/\/api.github.com\/users\/tayciryahmed\/repos","events_url":"https:\/\/api.github.com\/users\/tayciryahmed\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tayciryahmed\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-29T14:12:15Z","updated_at":"2020-06-01T12:20:42Z","closed_at":"2020-05-29T15:02:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/221","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/221","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/221.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/221.patch","merged_at":"2020-05-29T15:02:23Z"},"body":"When I run the command `RUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_real_dataset_arcd` while working on #220. I get the error ` unexpected keyword argument \"'download_and_prepare_kwargs'\"` at the level of  `load_dataset`. Indeed, this [function](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/src\/nlp\/load.py#L441) no longer has the argument `download_and_prepare_kwargs` but rather `download_config`. So here I change the tests accordingly. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/221\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/221\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/220","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/220\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/220\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/220\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/220","id":627280683,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI1MTEzMzEy","number":220,"title":"dataset_arcd","user":{"login":"tayciryahmed","id":13635495,"node_id":"MDQ6VXNlcjEzNjM1NDk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13635495?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tayciryahmed","html_url":"https:\/\/github.com\/tayciryahmed","followers_url":"https:\/\/api.github.com\/users\/tayciryahmed\/followers","following_url":"https:\/\/api.github.com\/users\/tayciryahmed\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tayciryahmed\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tayciryahmed\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tayciryahmed\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tayciryahmed\/orgs","repos_url":"https:\/\/api.github.com\/users\/tayciryahmed\/repos","events_url":"https:\/\/api.github.com\/users\/tayciryahmed\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tayciryahmed\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-29T13:46:50Z","updated_at":"2020-05-29T14:58:40Z","closed_at":"2020-05-29T14:57:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/220","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/220","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/220.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/220.patch","merged_at":"2020-05-29T14:57:21Z"},"body":"Added Arabic Reading Comprehension Dataset (ARCD): https:\/\/arxiv.org\/abs\/1906.05394","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/220\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":1,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/220\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/219","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/219\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/219\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/219\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/219","id":627235893,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI1MDc2NjQx","number":219,"title":"force mwparserfromhell as third party","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-29T12:33:17Z","updated_at":"2020-05-29T13:30:13Z","closed_at":"2020-05-29T13:30:12Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/219","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/219","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/219.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/219.patch","merged_at":"2020-05-29T13:30:12Z"},"body":"This should fix your env because you had `mwparserfromhell ` as a first party for `isort` @patrickvonplaten ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/219\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/219\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/218","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/218\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/218\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/218\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/218","id":627173407,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI1MDI2NzEz","number":218,"title":"Add Natual Questions and  C4 scripts","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-29T10:40:30Z","updated_at":"2020-05-29T12:31:01Z","closed_at":"2020-05-29T12:31:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/218","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/218","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/218.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/218.patch","merged_at":"2020-05-29T12:31:00Z"},"body":"Scripts are ready !\r\nHowever they are not processed nor directly available from gcp yet.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/218\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/218\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/217","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/217\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/217\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/217\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/217","id":627128403,"node_id":"MDU6SXNzdWU2MjcxMjg0MDM=","number":217,"title":"Multi-task dataset mixing","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":22,"created_at":"2020-05-29T09:22:26Z","updated_at":"2020-10-26T08:46:33Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It seems like many of the best performing models on the GLUE benchmark make some use of multitask learning (simultaneous training on multiple tasks).\r\n\r\nThe [T5 paper](https:\/\/arxiv.org\/pdf\/1910.10683.pdf) highlights multiple ways of mixing the tasks together during finetuning:\r\n- **Examples-proportional mixing** - sample from tasks proportionally to their dataset size\r\n- **Equal mixing** - sample uniformly from each task\r\n- **Temperature-scaled mixing** - The generalized approach used by multilingual BERT which uses a temperature T, where the mixing rate of each task is raised to the power 1\/T and renormalized. 
When T=1 this is equivalent to examples-proportional mixing, and becomes closer to equal mixing with increasing T.\r\n\r\nFollowing this discussion https:\/\/github.com\/huggingface\/transformers\/issues\/4340 in [transformers](https:\/\/github.com\/huggingface\/transformers), @enzoampil suggested that the `nlp` library might be a better place for this functionality.\r\n\r\nSome method for combining datasets could be implemented, e.g.\r\n```\r\ndataset = nlp.load_multitask(['squad','imdb','cnn_dm'], temperature=2.0, ...)\r\n```\r\n\r\nWe would need a few additions:\r\n- Method of identifying the tasks - how can we support adding a string to each task as an identifier: e.g. 'summarisation: '?\r\n- Method of combining the metrics - a standard approach is to use the specific metric for each task and add them together for a combined score.\r\n\r\nIt would be great to support common use cases such as pretraining on the GLUE benchmark before fine-tuning on each GLUE task in turn. \r\n\r\nI'm willing to write bits\/most of this; I just need some guidance on the interface and other library details so I can integrate it properly.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/217\/reactions","total_count":9,"+1":9,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/217\/timeline","performed_via_github_app":null}
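For reference, a minimal sketch of the temperature-scaled mixing rule described above: each task's rate is proportional to its dataset size raised to the power 1/T and renormalized, so T=1 reproduces examples-proportional mixing and large T approaches equal mixing. The `nlp.load_multitask` call in the issue is a proposed API, and the sizes and sampling loop below are illustrative only.

```python
# Illustrative sketch of temperature-scaled mixing; `nlp.load_multitask` above is
# only a proposed API, and the dataset sizes here are rough training-set sizes
# used to show the effect of T.
import random


def mixing_rates(sizes, temperature=2.0):
    # rate_i is proportional to size_i ** (1 / T), renormalized to sum to 1.
    weights = [size ** (1.0 / temperature) for size in sizes]
    total = sum(weights)
    return [weight / total for weight in weights]


sizes = {"squad": 87_599, "imdb": 25_000, "cnn_dm": 287_113}
rates = mixing_rates(list(sizes.values()), temperature=2.0)
print({task: round(rate, 3) for task, rate in zip(sizes, rates)})

# Pick which task each of the next ten training examples is drawn from.
print(random.choices(list(sizes), weights=rates, k=10))
```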
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/216","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/216\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/216\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/216\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/216","id":626896890,"node_id":"MDU6SXNzdWU2MjY4OTY4OTA=","number":216,"title":"\u2753 How to get ROUGE-2 with the ROUGE metric ?","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-28T23:47:32Z","updated_at":"2020-06-01T00:04:35Z","closed_at":"2020-06-01T00:04:35Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to use ROUGE metric, but I don't know how to get the ROUGE-2 metric.\r\n\r\n---\r\n\r\nI compute scores with :\r\n\r\n```python\r\nimport nlp\r\n\r\nrouge = nlp.load_metric('rouge')\r\nwith open(\"pred.txt\") as p, open(\"ref.txt\") as g:\r\n    for lp, lg in zip(p, g):\r\n        rouge.add([lp], [lg])\r\nscore = rouge.compute()\r\n```\r\n\r\nthen : _(print only the F-score for readability)_\r\n\r\n```python\r\nfor k, s in score.items():\r\n    print(k, s.mid.fmeasure)\r\n```\r\n\r\nIt gives :\r\n\r\n>rouge1 0.7915168355671788\r\nrougeL 0.7915168355671788\r\n\r\n---\r\n\r\n**How can I get the ROUGE-2 score ?**\r\n\r\nAlso, it's seems weird that ROUGE-1 and ROUGE-L scores are the same. Did I made a mistake ?\r\n\r\n@lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/216\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/216\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/215","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/215\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/215\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/215\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/215","id":626867879,"node_id":"MDU6SXNzdWU2MjY4Njc4Nzk=","number":215,"title":"NonMatchingSplitsSizesError when loading blog_authorship_corpus","user":{"login":"cedricconol","id":52105365,"node_id":"MDQ6VXNlcjUyMTA1MzY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52105365?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cedricconol","html_url":"https:\/\/github.com\/cedricconol","followers_url":"https:\/\/api.github.com\/users\/cedricconol\/followers","following_url":"https:\/\/api.github.com\/users\/cedricconol\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cedricconol\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cedricconol\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cedricconol\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cedricconol\/orgs","repos_url":"https:\/\/api.github.com\/users\/cedricconol\/repos","events_url":"https:\/\/api.github.com\/users\/cedricconol\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cedricconol\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2020-05-28T22:55:19Z","updated_at":"2022-02-10T13:05:45Z","closed_at":"2022-02-10T13:05:45Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Getting this error when i run `nlp.load_dataset('blog_authorship_corpus')`. \r\n\r\n```\r\nraise NonMatchingSplitsSizesError(str(bad_splits))\r\nnlp.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', \r\nnum_bytes=610252351, num_examples=532812, dataset_name='blog_authorship_corpus'), \r\n'recorded': SplitInfo(name='train', num_bytes=616473500, num_examples=536323, \r\ndataset_name='blog_authorship_corpus')}, {'expected': SplitInfo(name='validation', \r\nnum_bytes=37500394, num_examples=31277, dataset_name='blog_authorship_corpus'), \r\n'recorded': SplitInfo(name='validation', num_bytes=30786661, num_examples=27766, \r\ndataset_name='blog_authorship_corpus')}]\r\n```\r\n\r\nUpon checking it seems like there is a disparity between the information in `datasets\/blog_authorship_corpus\/dataset_infos.json` and what was downloaded. 
Although I can get away with this by passing `ignore_verifications=True` in `load_dataset`, I'm thinking doing so might cause problems later on.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/215\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/215\/timeline","performed_via_github_app":null}
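Sketched as a call, the workaround mentioned in this issue looks as follows; note that skipping verification only silences the size check against `dataset_infos.json`, it does not correct the recorded split sizes.

```python
# Workaround from the issue above, sketched as a call: verification of the
# recorded split sizes is skipped, the underlying mismatch is not fixed.
import nlp

datasets = nlp.load_dataset("blog_authorship_corpus", ignore_verifications=True)
print({split: len(ds) for split, ds in datasets.items()})
```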
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/214","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/214\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/214\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/214\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/214","id":626641549,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI0NTk1NjIx","number":214,"title":"[arrow_dataset.py] add new filter function","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":13,"created_at":"2020-05-28T16:21:40Z","updated_at":"2020-05-29T11:43:29Z","closed_at":"2020-05-29T11:32:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/214","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/214","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/214.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/214.patch","merged_at":"2020-05-29T11:32:20Z"},"body":"The `.map()` function is super useful, but can IMO a bit tedious when filtering certain examples.\r\nI think, filtering out examples is also a very common operation people would like to perform on datasets.\r\n\r\nThis PR is a proposal to add a `.filter()` function in the same spirit than the `.map()` function.\r\n\r\nHere is a sample code you can play around with:\r\n\r\n```python\r\nds = nlp.load_dataset(\"squad\", split=\"validation[:10%]\")\r\n\r\n\r\ndef remove_under_idx_5(example, idx):\r\n    return idx < 5\r\n\r\n\r\ndef only_keep_examples_with_is_in_context(example):\r\n    return \"is\" in example[\"context\"]\r\n\r\n\r\nresult_keep_only_first_5 = ds.filter(remove_under_idx_5, with_indices=True, load_from_cache_file=False)\r\nresult_keep_examples_with_is_in_context = ds.filter(only_keep_examples_with_is_in_context, load_from_cache_file=False)\r\n\r\nprint(\"Original number of examples: {}\".format(len(ds)))\r\nprint(\"First five examples number of examples: {}\".format(len(result_keep_only_first_5)))\r\nprint(\"Is in context examples number of examples: 
{}\".format(len(result_keep_examples_with_is_in_context)))\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/214\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/214\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/213","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/213\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/213\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/213\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/213","id":626587995,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI0NTUxODE3","number":213,"title":"better message if missing beam options","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-28T15:06:57Z","updated_at":"2020-05-29T09:51:17Z","closed_at":"2020-05-29T09:51:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/213","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/213","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/213.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/213.patch","merged_at":"2020-05-29T09:51:16Z"},"body":"WDYT @yjernite ?\r\nFor example:\r\n```python\r\ndataset = nlp.load_dataset('wikipedia', '20200501.aa')\r\n```\r\nRaises:\r\n```\r\nMissingBeamOptions: Trying to generate a dataset using Apache Beam, yet no Beam Runner or PipelineOptions() has been provided in `load_dataset` or in the builder arguments. For big datasets it has to run on large-scale data processing tools like Dataflow, Spark, etc. More information about Apache Beam runners at https:\/\/beam.apache.org\/documentation\/runners\/capability-matrix\/\r\nIf you really want to run it locally because you feel like the Dataset is small enough, you can use the local beam runner called `DirectRunner` (you may run out of memory). \r\nExample of usage: \r\n\t`load_dataset('wikipedia', '20200501.aa', beam_runner='DirectRunner')`\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/213\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/213\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/212","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/212\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/212\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/212\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/212","id":626580198,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI0NTQ1NjAy","number":212,"title":"have 'add' and 'add_batch' for metrics","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-28T14:56:47Z","updated_at":"2020-05-29T10:41:05Z","closed_at":"2020-05-29T10:41:04Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/212","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/212","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/212.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/212.patch","merged_at":"2020-05-29T10:41:04Z"},"body":"This should fix #116 \r\n\r\nPreviously the `.add` method of metrics expected a batch of examples.\r\nNow `.add` expects one prediction\/reference and `.add_batch` expects a batch.\r\nI think it is more coherent with the way the ArrowWriter works.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/212\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/212\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/211","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/211\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/211\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/211\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/211","id":626565994,"node_id":"MDU6SXNzdWU2MjY1NjU5OTQ=","number":211,"title":"[Arrow writer, Trivia_qa] Could not convert TagMe with type str: converting to null type","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"assignees":[{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"
https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2020-05-28T14:38:14Z","updated_at":"2020-07-23T10:15:16Z","closed_at":"2020-07-23T10:15:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Running the following code \r\n\r\n```\r\nimport nlp\r\nds = nlp.load_dataset(\"trivia_qa\", \"rc\", split=\"validation[:1%]\")  # this might take 2.3 min to download but it's cached afterwards...\r\nds.map(lambda x: x, load_from_cache_file=False)\r\n```\r\n\r\ntriggers a `ArrowInvalid: Could not convert TagMe with type str: converting to null type` error.\r\n\r\nOn the other hand if we remove a certain column of `trivia_qa` which seems responsible for the bug, it works:\r\n\r\n```\r\nimport nlp\r\nds = nlp.load_dataset(\"trivia_qa\", \"rc\", split=\"validation[:1%]\")  # this might take 2.3 min to download but it's cached afterwards...\r\nds.map(lambda x: x, remove_columns=[\"entity_pages\"], load_from_cache_file=False)\r\n```\r\n\r\n. Seems quite hard to debug what's going on here... @lhoestq @thomwolf - do you have a good first guess what the problem could be?\r\n\r\n**Note** BTW: I think this could be a good test to check that the datasets work correctly: Take a tiny portion of the dataset and check that it can be written correctly.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/211\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/211\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/210","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/210\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/210\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/210\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/210","id":626504243,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI0NDgyNDgz","number":210,"title":"fix xnli metric kwargs description","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-28T13:21:44Z","updated_at":"2020-05-28T13:22:11Z","closed_at":"2020-05-28T13:22:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/210","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/210","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/210.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/210.patch","merged_at":"2020-05-28T13:22:10Z"},"body":"The text was wrong as noticed in #202 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/210\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/210\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/209","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/209\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/209\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/209\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/209","id":626405849,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI0NDAwOTc4","number":209,"title":"Add a Google Drive exception for small files","user":{"login":"airKlizz","id":25703835,"node_id":"MDQ6VXNlcjI1NzAzODM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25703835?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/airKlizz","html_url":"https:\/\/github.com\/airKlizz","followers_url":"https:\/\/api.github.com\/users\/airKlizz\/followers","following_url":"https:\/\/api.github.com\/users\/airKlizz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/airKlizz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/airKlizz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/airKlizz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/airKlizz\/orgs","repos_url":"https:\/\/api.github.com\/users\/airKlizz\/repos","events_url":"https:\/\/api.github.com\/users\/airKlizz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/airKlizz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-28T10:40:17Z","updated_at":"2020-05-28T15:15:04Z","closed_at":"2020-05-28T15:15:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/209","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/209","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/209.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/209.patch","merged_at":"2020-05-28T15:15:04Z"},"body":"I tried to use the ``nlp`` library to load personnal datasets. I mainly copy-paste the code for ``multi-news`` dataset because my files are stored on Google Drive. \r\n\r\nOne of my dataset is small (< 25Mo) so it can be verified by Drive without asking the authorization to the user. This makes the download starts directly. \r\n\r\nCurrently the ``nlp`` raises a error: ``ConnectionError: Couldn't reach https:\/\/drive.google.com\/uc?export=download&id=1DGnbUY9zwiThTdgUvVTSAvSVHoloCgun`` while the url is working. 
So I just added a new exception, as you have already done for ``firebasestorage.googleapis.com``:\r\n\r\n```\r\nelif (response.status_code == 400 and \"firebasestorage.googleapis.com\" in url) or (response.status_code == 405 and \"drive.google.com\" in url)\r\n```\r\n\r\nI made an example of the error that you can run on [![Open In Colab](https:\/\/colab.research.google.com\/assets\/colab-badge.svg)](https:\/\/colab.research.google.com\/drive\/1ae_JJ9uvUt-9GBh0uGZhjbF5aXkl-BPv?usp=sharing)\r\n\r\nI avoided the error by adding an exception, but there may be a cleaner way to do it.\r\n\r\nMany thanks :hugs:\r\nBest,","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/209\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/209\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/208","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/208\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/208\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/208\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/208","id":626398519,"node_id":"MDExOlB1bGxSZXF1ZXN0NDI0Mzk0ODIx","number":208,"title":"[Dummy data] insert config name instead of config ","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-28T10:28:19Z","updated_at":"2020-05-28T12:48:01Z","closed_at":"2020-05-28T12:48:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/208","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/208","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/208.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/208.patch","merged_at":"2020-05-28T12:48:00Z"},"body":"Thanks @yjernite for letting me know. in the dummy data command the config name shuold be passed to the dataset builder and not the config itself. \r\n\r\nAlso, @lhoestq fixed small import bug introduced by beam command I think.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/208\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/208\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/207","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/207\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/207\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/207\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/207","id":625932200,"node_id":"MDU6SXNzdWU2MjU5MzIyMDA=","number":207,"title":"Remove test set from NLP viewer","user":{"login":"chrisdonahue","id":748399,"node_id":"MDQ6VXNlcjc0ODM5OQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/748399?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chrisdonahue","html_url":"https:\/\/github.com\/chrisdonahue","followers_url":"https:\/\/api.github.com\/users\/chrisdonahue\/followers","following_url":"https:\/\/api.github.com\/users\/chrisdonahue\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chrisdonahue\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chrisdonahue\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chrisdonahue\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chrisdonahue\/orgs","repos_url":"https:\/\/api.github.com\/users\/chrisdonahue\/repos","events_url":"https:\/\/api.github.com\/users\/chrisdonahue\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chrisdonahue\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-27T18:32:07Z","updated_at":"2022-02-10T13:17:45Z","closed_at":"2022-02-10T13:17:45Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"While the new [NLP viewer](https:\/\/huggingface.co\/nlp\/viewer\/) is a great tool, I think it would be best to outright remove the option of looking at the test sets. At the very least, a warning should be displayed to users before showing the test set. Newcomers to the field might not be aware of best practices, and small things like this can help increase awareness.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/207\/reactions","total_count":3,"+1":3,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/207\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/206","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/206\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/206\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/206\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/206","id":625842989,"node_id":"MDU6SXNzdWU2MjU4NDI5ODk=","number":206,"title":"[Question] Combine 2 datasets which have the same columns","user":{"login":"airKlizz","id":25703835,"node_id":"MDQ6VXNlcjI1NzAzODM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25703835?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/airKlizz","html_url":"https:\/\/github.com\/airKlizz","followers_url":"https:\/\/api.github.com\/users\/airKlizz\/followers","following_url":"https:\/\/api.github.com\/users\/airKlizz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/airKlizz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/airKlizz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/airKlizz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/airKlizz\/orgs","repos_url":"https:\/\/api.github.com\/users\/airKlizz\/repos","events_url":"https:\/\/api.github.com\/users\/airKlizz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/airKlizz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-27T16:25:52Z","updated_at":"2020-06-10T09:11:14Z","closed_at":"2020-06-10T09:11:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi,\r\n\r\nI am using ``nlp`` to load personal datasets. I created summarization datasets in multi-languages based on wikinews. I have one dataset for english and one for german (french is getting to be ready as well). I want to keep these datasets independent because they need different pre-processing (add different task-specific prefixes for T5 : *summarize:* for english and *zusammenfassen:* for german)\r\n\r\nMy issue is that I want to train T5 on the combined english and german datasets to see if it improves results. So I would like to combine 2 datasets (which have the same columns) to make one and train T5 on it. I was wondering if there is a proper way to do it? I assume that it can be done by combining all examples of each dataset but maybe you have a better solution.\r\n\r\nHoping this is clear enough,\r\n\r\nThanks a lot \ud83d\ude0a\r\nBest","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/206\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/206\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/205","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/205\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/205\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/205\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/205","id":625839335,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIzOTY2ODE1","number":205,"title":"Better arrow dataset iter","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-27T16:20:21Z","updated_at":"2020-05-27T16:39:58Z","closed_at":"2020-05-27T16:39:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/205","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/205","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/205.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/205.patch","merged_at":"2020-05-27T16:39:56Z"},"body":"I tried to play around with `tf.data.Dataset.from_generator` and I found out that the `__iter__` that we have for `nlp.arrow_dataset.Dataset` ignores the format that has been set (torch or tensorflow).\r\nWith these changes I should be able to come up with a `tf.data.Dataset` that uses lazy loading, as asked in #193.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/205\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/205\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/204","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/204\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/204\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/204\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/204","id":625655849,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIzODE5MTQw","number":204,"title":"Add Dataflow support + Wikipedia + Wiki40b","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-27T12:32:49Z","updated_at":"2020-05-28T08:10:35Z","closed_at":"2020-05-28T08:10:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/204","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/204","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/204.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/204.patch","merged_at":"2020-05-28T08:10:34Z"},"body":"# Add Dataflow support + Wikipedia + Wiki40b\r\n\r\n## Support datasets processing with Apache Beam\r\n\r\nSome datasets are too big to be processed on a single machine, for example: wikipedia, wiki40b, etc. Apache Beam allows to process datasets on many execution engines like Dataflow, Spark, Flink, etc.\r\n\r\nTo process such datasets with Beam, I added a command to run beam pipelines `nlp-cli run_beam path\/to\/dataset\/script`. Then I used it to process the english + french wikipedia, and the english of wiki40b.\r\nThe processed arrow files are on GCS and are the result of a Dataflow job.\r\n\r\nI added a markdown documentation file in `docs` that explains how to use it properly.\r\n\r\n## Load already processed datasets\r\n\r\nNow that we have those datasets already processed, I made it possible to load datasets that are already processed. You can do `load_dataset('wikipedia', '20200501.en')` and it will download the processed files from the Hugging Face GCS directly into the user's cache and be ready to use !\r\n\r\nThe Wikipedia dataset was already asked in #187 and this PR should soon allow to add Natural Questions as asked in #129 \r\n\r\n## Other changes in the code\r\n\r\nTo make things work, I had to do a few adjustments:\r\n- add a `ship_files_with_pipeline` method to the `DownloadManager`. 
This is because beam pipelines can be run in the cloud and therefore need to have access to your downloaded data. I used it in the wikipedia script:\r\n    ```python\r\n    if not pipeline.is_local():\r\n        downloaded_files = dl_manager.ship_files_with_pipeline(downloaded_files, pipeline)\r\n    ```\r\n- add parquet-to-arrow conversion. This is because the outputs of beam pipelines are parquet files, so we need to convert them to arrow and have the arrow files on GCS\r\n- add a test script with a dummy beam dataset\r\n- minor adjustments to allow read\/write operations on remote files using `apache_beam.io.filesystems.FileSystems` if we want (it can be connected to gcp, s3, hdfs, etc.)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/204\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/204\/timeline","performed_via_github_app":null}
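A hedged sketch of the parquet-to-arrow conversion step mentioned in the PR description above, using plain `pyarrow` rather than the repository's actual helper; both file paths are placeholders:

```python
import pyarrow as pa
import pyarrow.parquet as pq

# Read one parquet shard produced by the Beam pipeline and rewrite it as an
# Arrow IPC file that can then be uploaded to GCS.
table = pq.read_table("output/shard-00000.parquet")
with pa.OSFile("output/shard-00000.arrow", "wb") as sink:
    with pa.ipc.new_file(sink, table.schema) as writer:
        writer.write_table(table)
```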
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/203","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/203\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/203\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/203\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/203","id":625515488,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIzNzEyMTQ3","number":203,"title":"Raise an error if no config name for datasets like glue","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-27T09:03:58Z","updated_at":"2020-05-27T16:40:39Z","closed_at":"2020-05-27T16:40:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/203","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/203","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/203.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/203.patch","merged_at":"2020-05-27T16:40:38Z"},"body":"Some datasets like glue (see #130) and scientific_papers (see #197) have many configs.\r\nFor example for glue there are cola, sst2, mrpc etc.\r\n\r\nCurrently if a user does `load_dataset('glue')`, then Cola is loaded by default and it can be confusing. Instead, we should raise an error to let the user know that he has to pick one of the available configs (as proposed in #152). For example for glue, the message looks like:\r\n```\r\nValueError: Config name is missing.\r\nPlease pick one among the available configs: ['cola', 'sst2', 'mrpc', 'qqp', 'stsb', 'mnli', 'mnli_mismatched', 'mnli_matched', 'qnli', 'rte', 'wnli', 'ax']\r\nExample of usage:\r\n\t`load_dataset('glue', 'cola')`\r\n```\r\n\r\nThe error is raised if the config name is missing and if there are >=2 possible configs.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/203\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/203\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/202","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/202\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/202\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/202\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/202","id":625493983,"node_id":"MDU6SXNzdWU2MjU0OTM5ODM=","number":202,"title":"Mistaken `_KWARGS_DESCRIPTION` for XNLI metric","user":{"login":"phiyodr","id":33572125,"node_id":"MDQ6VXNlcjMzNTcyMTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33572125?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/phiyodr","html_url":"https:\/\/github.com\/phiyodr","followers_url":"https:\/\/api.github.com\/users\/phiyodr\/followers","following_url":"https:\/\/api.github.com\/users\/phiyodr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/phiyodr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/phiyodr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/phiyodr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/phiyodr\/orgs","repos_url":"https:\/\/api.github.com\/users\/phiyodr\/repos","events_url":"https:\/\/api.github.com\/users\/phiyodr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/phiyodr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-27T08:34:42Z","updated_at":"2020-05-28T13:22:36Z","closed_at":"2020-05-28T13:22:36Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi!\r\n\r\nThe [`_KWARGS_DESCRIPTION`](https:\/\/github.com\/huggingface\/nlp\/blob\/7d0fa58641f3f462fb2861dcdd6ce7f0da3f6a56\/metrics\/xnli\/xnli.py#L45) for the XNLI metric uses `Args` and `Returns` text from [BLEU](https:\/\/github.com\/huggingface\/nlp\/blob\/7d0fa58641f3f462fb2861dcdd6ce7f0da3f6a56\/metrics\/bleu\/bleu.py#L58) metric:\r\n\r\n```\r\n_KWARGS_DESCRIPTION = \"\"\"\r\nComputes XNLI score which is just simple accuracy.\r\nArgs:\r\n    predictions: list of translations to score.\r\n        Each translation should be tokenized into a list of tokens.\r\n    references: list of lists of references for each translation.\r\n        Each reference should be tokenized into a list of tokens.\r\n    max_order: Maximum n-gram order to use when computing BLEU score.\r\n    smooth: Whether or not to apply Lin et al. 
2004 smoothing.\r\nReturns:\r\n    'bleu': bleu score,\r\n    'precisions': geometric mean of n-gram precisions,\r\n    'brevity_penalty': brevity penalty,\r\n    'length_ratio': ratio of lengths,\r\n    'translation_length': translation_length,\r\n    'reference_length': reference_length\r\n\"\"\"\r\n```\r\n\r\nBut it should be something like:\r\n\r\n```\r\n_KWARGS_DESCRIPTION = \"\"\"\r\nComputes XNLI score which is just simple accuracy.\r\nArgs:\r\n    predictions: Predicted labels.\r\n    references: Ground truth labels.\r\nReturns:\r\n    'accuracy': accuracy\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/202\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/202\/timeline","performed_via_github_app":null}
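For reference, the simple accuracy promised by the corrected docstring can be computed as below; this is an illustrative sketch, not the metric's actual implementation:

```python
def xnli_simple_accuracy(predictions, references):
    """Illustrative only: simple accuracy as in the corrected docstring."""
    assert len(predictions) == len(references)
    correct = sum(p == r for p, r in zip(predictions, references))
    return {"accuracy": correct / len(references)}

print(xnli_simple_accuracy([0, 1, 2, 2], [0, 1, 1, 2]))  # {'accuracy': 0.75}
```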
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/201","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/201\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/201\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/201\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/201","id":625235430,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIzNDkzNTMw","number":201,"title":"Fix typo in README","user":{"login":"LysandreJik","id":30755778,"node_id":"MDQ6VXNlcjMwNzU1Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30755778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LysandreJik","html_url":"https:\/\/github.com\/LysandreJik","followers_url":"https:\/\/api.github.com\/users\/LysandreJik\/followers","following_url":"https:\/\/api.github.com\/users\/LysandreJik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LysandreJik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LysandreJik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LysandreJik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LysandreJik\/orgs","repos_url":"https:\/\/api.github.com\/users\/LysandreJik\/repos","events_url":"https:\/\/api.github.com\/users\/LysandreJik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LysandreJik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-26T22:18:21Z","updated_at":"2020-05-26T23:40:31Z","closed_at":"2020-05-26T23:00:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/201","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/201","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/201.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/201.patch","merged_at":"2020-05-26T23:00:56Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/201\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/201\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/200","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/200\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/200\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/200\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/200","id":625226638,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIzNDg2NTM0","number":200,"title":"[ArrowWriter] Set schema at first write example","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-26T21:59:48Z","updated_at":"2020-05-27T09:07:54Z","closed_at":"2020-05-27T09:07:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/200","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/200","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/200.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/200.patch","merged_at":"2020-05-27T09:07:53Z"},"body":"Right now if the schema was not specified when instantiating `ArrowWriter`, then it could be set with the first `write_table` for example (it calls `self._build_writer()` to do so).\r\n\r\nI noticed that it was not done if the first example is added via `.write`, so I added it for coherence.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/200\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/200\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/199","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/199\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/199\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/199\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/199","id":625217440,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIzNDc4ODIx","number":199,"title":"Fix GermEval 2014 dataset infos","user":{"login":"stefan-it","id":20651387,"node_id":"MDQ6VXNlcjIwNjUxMzg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20651387?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stefan-it","html_url":"https:\/\/github.com\/stefan-it","followers_url":"https:\/\/api.github.com\/users\/stefan-it\/followers","following_url":"https:\/\/api.github.com\/users\/stefan-it\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stefan-it\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stefan-it\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stefan-it\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stefan-it\/orgs","repos_url":"https:\/\/api.github.com\/users\/stefan-it\/repos","events_url":"https:\/\/api.github.com\/users\/stefan-it\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stefan-it\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-26T21:41:44Z","updated_at":"2020-05-26T21:50:24Z","closed_at":"2020-05-26T21:50:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/199","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/199","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/199.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/199.patch","merged_at":"2020-05-26T21:50:24Z"},"body":"Hi,\r\n\r\nthis PR just removes the `dataset_info.json` file and adds a newly generated `dataset_infos.json` file.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/199\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/199\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/198","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/198\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/198\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/198\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/198","id":625200627,"node_id":"MDU6SXNzdWU2MjUyMDA2Mjc=","number":198,"title":"Index outside of table length","user":{"login":"casajarm","id":305717,"node_id":"MDQ6VXNlcjMwNTcxNw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/305717?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/casajarm","html_url":"https:\/\/github.com\/casajarm","followers_url":"https:\/\/api.github.com\/users\/casajarm\/followers","following_url":"https:\/\/api.github.com\/users\/casajarm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/casajarm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/casajarm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/casajarm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/casajarm\/orgs","repos_url":"https:\/\/api.github.com\/users\/casajarm\/repos","events_url":"https:\/\/api.github.com\/users\/casajarm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/casajarm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-26T21:09:40Z","updated_at":"2020-05-26T22:43:49Z","closed_at":"2020-05-26T22:43:49Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The offset input box warns of numbers larger than a limit (like 2000) but then the errors start at a smaller value than that limit (like 1955).\r\n\r\n> ValueError: Index (2000) outside of table length (2000).\r\n> Traceback:\r\n> File \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/streamlit\/ScriptRunner.py\", line 322, in _run_script\r\n>     exec(code, module.__dict__)\r\n> File \"\/home\/sasha\/nlp_viewer\/run.py\", line 116, in \r\n>     v = d[item][k]\r\n> File \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/nlp\/arrow_dataset.py\", line 338, in __getitem__\r\n>     output_all_columns=self._output_all_columns,\r\n> File \"\/home\/sasha\/.local\/lib\/python3.7\/site-packages\/nlp\/arrow_dataset.py\", line 290, in _getitem\r\n>     raise ValueError(f\"Index ({key}) outside of table length ({self._data.num_rows}).\")","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/198\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/198\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/197","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/197\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/197\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/197\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/197","id":624966904,"node_id":"MDU6SXNzdWU2MjQ5NjY5MDQ=","number":197,"title":"Scientific Papers only downloading Pubmed","user":{"login":"antmarakis","id":17463361,"node_id":"MDQ6VXNlcjE3NDYzMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17463361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/antmarakis","html_url":"https:\/\/github.com\/antmarakis","followers_url":"https:\/\/api.github.com\/users\/antmarakis\/followers","following_url":"https:\/\/api.github.com\/users\/antmarakis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/antmarakis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/antmarakis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/antmarakis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/antmarakis\/orgs","repos_url":"https:\/\/api.github.com\/users\/antmarakis\/repos","events_url":"https:\/\/api.github.com\/users\/antmarakis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/antmarakis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-26T15:18:47Z","updated_at":"2020-05-28T08:19:28Z","closed_at":"2020-05-28T08:19:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi!\r\n\r\nI have been playing around with this module, and I am a bit confused about the `scientific_papers` dataset. I thought that it would download two separate datasets, arxiv and pubmed. 
But when I run the following:\r\n\r\n```\r\ndataset = nlp.load_dataset('scientific_papers', data_dir='.', cache_dir='.')\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 5.05k\/5.05k [00:00<00:00, 2.66MB\/s]\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 4.90k\/4.90k [00:00<00:00, 2.42MB\/s]\r\nDownloading and preparing dataset scientific_papers\/pubmed (download: 4.20 GiB, generated: 2.33 GiB, total: 6.53 GiB) to .\/scientific_papers\/pubmed\/1.1.1...\r\nDownloading: 3.62GB [00:40, 90.5MB\/s]\r\nDownloading: 880MB [00:08, 101MB\/s]\r\nDataset scientific_papers downloaded and prepared to .\/scientific_papers\/pubmed\/1.1.1. Subsequent calls will reuse this data.\r\n```\r\n\r\nonly a pubmed folder is created. There doesn't seem to be something for arxiv. Are these two datasets merged? Or have I misunderstood something?\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/197\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/197\/timeline","performed_via_github_app":null}
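As hinted by the `scientific_papers/pubmed` path in the quoted log, arxiv and pubmed are separate configs rather than one merged dataset, so each has to be requested explicitly. A hedged sketch, keeping the `nlp` package name used in the issue:

```python
import nlp  # this package was later renamed to `datasets`

# Each corpus is a separate config; the two calls below produce two
# independent dataset objects, cached under their own config directories.
pubmed = nlp.load_dataset("scientific_papers", "pubmed")
arxiv = nlp.load_dataset("scientific_papers", "arxiv")
```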
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/196","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/196\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/196\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/196\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/196","id":624901266,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIzMjIwMjIw","number":196,"title":"Check invalid config name","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":13,"created_at":"2020-05-26T13:52:51Z","updated_at":"2020-05-26T21:04:56Z","closed_at":"2020-05-26T21:04:55Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/196","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/196","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/196.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/196.patch","merged_at":"2020-05-26T21:04:55Z"},"body":"As said in #194, we should raise an error if the config name has bad characters.\r\nBad characters are those that are not allowed for directory names on windows.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/196\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/196\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/195","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/195\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/195\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/195\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/195","id":624858686,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIzMTg1NTAy","number":195,"title":"[Dummy data command] add new case to command","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-26T12:50:47Z","updated_at":"2020-05-26T14:38:28Z","closed_at":"2020-05-26T14:38:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/195","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/195","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/195.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/195.patch","merged_at":"2020-05-26T14:38:27Z"},"body":"Qanta: #194 introduces a case that was not noticed before. This change in code helps community users to have an easier time creating the dummy data. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/195\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/195\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/194","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/194\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/194\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/194\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/194","id":624854897,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIzMTgyNDM5","number":194,"title":"Add Dataset: Qanta","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-26T12:44:35Z","updated_at":"2020-05-26T16:58:17Z","closed_at":"2020-05-26T13:16:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/194","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/194","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/194.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/194.patch","merged_at":"2020-05-26T13:16:20Z"},"body":"Fixes dummy data for #169 @EntilZha","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/194\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/194\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/193","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/193\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/193\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/193\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/193","id":624655558,"node_id":"MDU6SXNzdWU2MjQ2NTU1NTg=","number":193,"title":"[Tensorflow] Use something else than `from_tensor_slices()`","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/receive
d_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2020-05-26T07:19:14Z","updated_at":"2020-10-27T15:28:11Z","closed_at":"2020-10-27T15:28:11Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"In the example notebook, the TF Dataset is built using `from_tensor_slices()` :\r\n\r\n```python\r\ncolumns = ['input_ids', 'token_type_ids', 'attention_mask', 'start_positions', 'end_positions']\r\ntrain_tf_dataset.set_format(type='tensorflow', columns=columns)\r\nfeatures = {x: train_tf_dataset[x] for x in columns[:3]} \r\nlabels = {\"output_1\": train_tf_dataset[\"start_positions\"]}\r\nlabels[\"output_2\"] = train_tf_dataset[\"end_positions\"]\r\ntfdataset = tf.data.Dataset.from_tensor_slices((features, labels)).batch(8)\r\n```\r\n\r\nBut according to [official tensorflow documentation](https:\/\/www.tensorflow.org\/guide\/data#consuming_numpy_arrays), this will load the entire dataset to memory.\r\n\r\n**This defeats one purpose of this library, which is lazy loading.**\r\n\r\nIs there any other way to load the `nlp` dataset into TF dataset lazily ?\r\n\r\n---\r\n\r\nFor example, is it possible to use [Arrow dataset](https:\/\/www.tensorflow.org\/io\/api_docs\/python\/tfio\/arrow\/ArrowDataset) ? If yes, is there any code example ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/193\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/193\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/192","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/192\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/192\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/192\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/192","id":624397592,"node_id":"MDU6SXNzdWU2MjQzOTc1OTI=","number":192,"title":"[Question] Create Apache Arrow dataset from raw text file","user":{"login":"mrm8488","id":3653789,"node_id":"MDQ6VXNlcjM2NTM3ODk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3653789?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mrm8488","html_url":"https:\/\/github.com\/mrm8488","followers_url":"https:\/\/api.github.com\/users\/mrm8488\/followers","following_url":"https:\/\/api.github.com\/users\/mrm8488\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mrm8488\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mrm8488\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mrm8488\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mrm8488\/orgs","repos_url":"https:\/\/api.github.com\/users\/mrm8488\/repos","events_url":"https:\/\/api.github.com\/users\/mrm8488\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mrm8488\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-05-25T16:42:47Z","updated_at":"2021-12-18T01:45:34Z","closed_at":"2020-10-27T15:20:22Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi guys, I have gathered and preprocessed about 2GB of COVID papers from CORD dataset @ Kggle. I have seen you have a text dataset as \"Crime and punishment\" in Apache arrow format. Do you have any script to do it from a raw txt file (preprocessed as for BERT like) or any guide?\r\nIs the worth of send it to you and add it to the NLP library?\r\nThanks, Manu\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/192\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/192\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/191","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/191\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/191\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/191\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/191","id":624394936,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIyODI3MDMy","number":191,"title":"[Squad es] add dataset_infos","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-25T16:35:52Z","updated_at":"2020-05-25T16:39:59Z","closed_at":"2020-05-25T16:39:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/191","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/191","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/191.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/191.patch","merged_at":"2020-05-25T16:39:58Z"},"body":"@mariamabarham - was still about to upload this. Should have waited with my comment a bit more :D ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/191\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/191\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/190","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/190\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/190\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/190\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/190","id":624124600,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIyNjA4NzAw","number":190,"title":"add squad Spanish v1 and v2","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-05-25T08:08:40Z","updated_at":"2020-05-25T16:28:46Z","closed_at":"2020-05-25T16:28:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/190","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/190","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/190.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/190.patch","merged_at":"2020-05-25T16:28:45Z"},"body":"This PR add the Spanish Squad versions 1 and 2 datasets. \r\nFixes #164 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/190\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/190\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/189","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/189\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/189\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/189\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/189","id":624048881,"node_id":"MDU6SXNzdWU2MjQwNDg4ODE=","number":189,"title":"[Question] BERT-style multiple choice formatting","user":{"login":"sarahwie","id":8027676,"node_id":"MDQ6VXNlcjgwMjc2NzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8027676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sarahwie","html_url":"https:\/\/github.com\/sarahwie","followers_url":"https:\/\/api.github.com\/users\/sarahwie\/followers","following_url":"https:\/\/api.github.com\/users\/sarahwie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sarahwie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sarahwie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sarahwie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sarahwie\/orgs","repos_url":"https:\/\/api.github.com\/users\/sarahwie\/repos","events_url":"https:\/\/api.github.com\/users\/sarahwie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sarahwie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-25T05:11:05Z","updated_at":"2020-05-25T18:38:28Z","closed_at":"2020-05-25T18:38:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello, I am wondering what the equivalent formatting of a dataset should be to allow for multiple-choice answering prediction, BERT-style. Previously, this was done by passing a list of `InputFeatures` to the dataloader instead of a list of `InputFeature`, where `InputFeatures` contained lists of length equal to the number of answer choices in the MCQ instead of single items. I'm a bit confused on what the output of my feature conversion function should be when using `dataset.map()` to ensure similar behavior.\r\n\r\nThanks!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/189\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/189\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/188","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/188\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/188\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/188\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/188","id":623890430,"node_id":"MDU6SXNzdWU2MjM4OTA0MzA=","number":188,"title":"When will the remaining math_dataset modules be added as dataset objects","user":{"login":"tylerroost","id":31251196,"node_id":"MDQ6VXNlcjMxMjUxMTk2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31251196?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tylerroost","html_url":"https:\/\/github.com\/tylerroost","followers_url":"https:\/\/api.github.com\/users\/tylerroost\/followers","following_url":"https:\/\/api.github.com\/users\/tylerroost\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tylerroost\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tylerroost\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tylerroost\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tylerroost\/orgs","repos_url":"https:\/\/api.github.com\/users\/tylerroost\/repos","events_url":"https:\/\/api.github.com\/users\/tylerroost\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tylerroost\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-24T15:46:52Z","updated_at":"2020-05-24T18:53:48Z","closed_at":"2020-05-24T18:53:48Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently only the algebra_linear_1d is supported. Is there a timeline for making the other modules supported. If no timeline is established, how can I help?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/188\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/188\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/187","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/187\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/187\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/187\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/187","id":623627800,"node_id":"MDU6SXNzdWU2MjM2Mjc4MDA=","number":187,"title":"[Question] How to load wikipedia ? Beam runner ?","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-23T10:18:52Z","updated_at":"2020-05-25T00:12:02Z","closed_at":"2020-05-25T00:12:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When `nlp.load_dataset('wikipedia')`, I got\r\n* `WARNING:nlp.builder:Trying to generate a dataset using Apache Beam, yet no Beam Runner or PipelineOptions() has been provided. Please pass a nlp.DownloadConfig(beam_runner=...) object to the builder.download_and_prepare(download_config=...) method. Default values will be used.`\r\n* `AttributeError: 'NoneType' object has no attribute 'size'`\r\n\r\nCould somebody tell me what should I do ? \r\n\r\n# Env\r\nOn Colab,\r\n```\r\ngit clone https:\/\/github.com\/huggingface\/nlp\r\ncd nlp\r\npip install -q .\r\n```\r\n```\r\n%pip install -q apache_beam mwparserfromhell\r\n-> ERROR: pydrive 1.3.1 has requirement oauth2client>=4.0.0, but you'll have oauth2client 3.0.0 which is incompatible.\r\nERROR: google-api-python-client 1.7.12 has requirement httplib2<1dev,>=0.17.0, but you'll have httplib2 0.12.0 which is incompatible.\r\nERROR: chainer 6.5.0 has requirement typing-extensions<=3.6.6, but you'll have typing-extensions 3.7.4.2 which is incompatible.\r\n```\r\n```\r\npip install -q apache-beam[interactive]\r\nERROR: google-colab 1.0.0 has requirement ipython~=5.5.0, but you'll have ipython 5.10.0 which is incompatible.\r\n```\r\n\r\n# The whole message\r\n```\r\nWARNING:nlp.builder:Trying to generate a dataset using Apache Beam, yet no Beam Runner or PipelineOptions() has been provided. Please pass a nlp.DownloadConfig(beam_runner=...) object to the builder.download_and_prepare(download_config=...) method. 
Default values will be used.\r\n\r\nDownloading and preparing dataset wikipedia\/20200501.aa (download: Unknown size, generated: Unknown size, total: Unknown size) to \/root\/.cache\/huggingface\/datasets\/wikipedia\/20200501.aa\/1.0.0...\r\n\r\n---------------------------------------------------------------------------\r\n\r\nAttributeError                            Traceback (most recent call last)\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/common.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.common.DoFnRunner.process()\r\n\r\n44 frames\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/common.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.common.PerWindowInvoker.invoke_process()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/common.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/io\/iobase.py in process(self, element, init_result)\r\n   1081       writer.write(e)\r\n-> 1082     return [window.TimestampedValue(writer.close(), timestamp.MAX_TIMESTAMP)]\r\n   1083 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/io\/filebasedsink.py in close(self)\r\n    422   def close(self):\r\n--> 423     self.sink.close(self.temp_handle)\r\n    424     return self.temp_shard_path\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/io\/parquetio.py in close(self, writer)\r\n    537     if len(self._buffer[0]) > 0:\r\n--> 538       self._flush_buffer()\r\n    539     if self._record_batches_byte_size > 0:\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/io\/parquetio.py in _flush_buffer(self)\r\n    569       for b in x.buffers():\r\n--> 570         size = size + b.size\r\n    571     self._record_batches_byte_size = self._record_batches_byte_size + size\r\n\r\nAttributeError: 'NoneType' object has no attribute 'size'\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nAttributeError                            Traceback (most recent call last)\r\n\r\n in ()\r\n----> 1 dset = nlp.load_dataset('wikipedia')\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n    518         download_mode=download_mode,\r\n    519         ignore_verifications=ignore_verifications,\r\n--> 520         save_infos=save_infos,\r\n    521     )\r\n    522 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, dl_manager, **download_and_prepare_kwargs)\r\n    370                 verify_infos = not save_infos and not ignore_verifications\r\n    371                 self._download_and_prepare(\r\n--> 372                     dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n    373                 )\r\n    374                 # Sync info\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in _download_and_prepare(self, dl_manager, verify_infos)\r\n    770         with beam.Pipeline(runner=beam_runner, options=beam_options,) as pipeline:\r\n    771             super(BeamBasedBuilder, self)._download_and_prepare(\r\n--> 772                 dl_manager, pipeline=pipeline, verify_infos=False\r\n    773             )  
# TODO{beam} verify infos\r\n    774 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/pipeline.py in __exit__(self, exc_type, exc_val, exc_tb)\r\n    501   def __exit__(self, exc_type, exc_val, exc_tb):\r\n    502     if not exc_type:\r\n--> 503       self.run().wait_until_finish()\r\n    504 \r\n    505   def visit(self, visitor):\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/pipeline.py in run(self, test_runner_api)\r\n    481       return Pipeline.from_runner_api(\r\n    482           self.to_runner_api(use_fake_coders=True), self.runner,\r\n--> 483           self._options).run(False)\r\n    484 \r\n    485     if self._options.view_as(TypeOptions).runtime_type_check:\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/pipeline.py in run(self, test_runner_api)\r\n    494       finally:\r\n    495         shutil.rmtree(tmpdir)\r\n--> 496     return self.runner.run_pipeline(self, self._options)\r\n    497 \r\n    498   def __enter__(self):\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/direct\/direct_runner.py in run_pipeline(self, pipeline, options)\r\n    128       runner = BundleBasedDirectRunner()\r\n    129 \r\n--> 130     return runner.run_pipeline(pipeline, options)\r\n    131 \r\n    132 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/portability\/fn_api_runner.py in run_pipeline(self, pipeline, options)\r\n    553 \r\n    554     self._latest_run_result = self.run_via_runner_api(\r\n--> 555         pipeline.to_runner_api(default_environment=self._default_environment))\r\n    556     return self._latest_run_result\r\n    557 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/portability\/fn_api_runner.py in run_via_runner_api(self, pipeline_proto)\r\n    563     # TODO(pabloem, BEAM-7514): Create a watermark manager (that has access to\r\n    564     #   the teststream (if any), and all the stages).\r\n--> 565     return self.run_stages(stage_context, stages)\r\n    566 \r\n    567   @contextlib.contextmanager\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/portability\/fn_api_runner.py in run_stages(self, stage_context, stages)\r\n    704               stage,\r\n    705               pcoll_buffers,\r\n--> 706               stage_context.safe_coders)\r\n    707           metrics_by_stage[stage.name] = stage_results.process_bundle.metrics\r\n    708           monitoring_infos_by_stage[stage.name] = (\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/portability\/fn_api_runner.py in _run_stage(self, worker_handler_factory, pipeline_components, stage, pcoll_buffers, safe_coders)\r\n   1071         cache_token_generator=cache_token_generator)\r\n   1072 \r\n-> 1073     result, splits = bundle_manager.process_bundle(data_input, data_output)\r\n   1074 \r\n   1075     def input_for(transform_id, input_id):\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/portability\/fn_api_runner.py in process_bundle(self, inputs, expected_outputs)\r\n   2332 \r\n   2333     with UnboundedThreadPoolExecutor() as executor:\r\n-> 2334       for result, split_result in executor.map(execute, part_inputs):\r\n   2335 \r\n   2336         split_result_list += split_result\r\n\r\n\/usr\/lib\/python3.6\/concurrent\/futures\/_base.py in result_iterator()\r\n    584                     # Careful not to keep a reference to the popped future\r\n    585                     if timeout is None:\r\n--> 586                
         yield fs.pop().result()\r\n    587                     else:\r\n    588                         yield fs.pop().result(end_time - time.monotonic())\r\n\r\n\/usr\/lib\/python3.6\/concurrent\/futures\/_base.py in result(self, timeout)\r\n    430                 raise CancelledError()\r\n    431             elif self._state == FINISHED:\r\n--> 432                 return self.__get_result()\r\n    433             else:\r\n    434                 raise TimeoutError()\r\n\r\n\/usr\/lib\/python3.6\/concurrent\/futures\/_base.py in __get_result(self)\r\n    382     def __get_result(self):\r\n    383         if self._exception:\r\n--> 384             raise self._exception\r\n    385         else:\r\n    386             return self._result\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/utils\/thread_pool_executor.py in run(self)\r\n     42       # If the future wasn't cancelled, then attempt to execute it.\r\n     43       try:\r\n---> 44         self._future.set_result(self._fn(*self._fn_args, **self._fn_kwargs))\r\n     45       except BaseException as exc:\r\n     46         # Even though Python 2 futures library has #set_exection(),\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/portability\/fn_api_runner.py in execute(part_map)\r\n   2329           self._registered,\r\n   2330           cache_token_generator=self._cache_token_generator)\r\n-> 2331       return bundle_manager.process_bundle(part_map, expected_outputs)\r\n   2332 \r\n   2333     with UnboundedThreadPoolExecutor() as executor:\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/portability\/fn_api_runner.py in process_bundle(self, inputs, expected_outputs)\r\n   2243             process_bundle_descriptor_id=self._bundle_descriptor.id,\r\n   2244             cache_tokens=[next(self._cache_token_generator)]))\r\n-> 2245     result_future = self._worker_handler.control_conn.push(process_bundle_req)\r\n   2246 \r\n   2247     split_results = []  # type: List[beam_fn_api_pb2.ProcessBundleSplitResponse]\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/portability\/fn_api_runner.py in push(self, request)\r\n   1557       self._uid_counter += 1\r\n   1558       request.instruction_id = 'control_%s' % self._uid_counter\r\n-> 1559     response = self.worker.do_instruction(request)\r\n   1560     return ControlFuture(request.instruction_id, response)\r\n   1561 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/worker\/sdk_worker.py in do_instruction(self, request)\r\n    413       # E.g. 
if register is set, this will call self.register(request.register))\r\n    414       return getattr(self, request_type)(\r\n--> 415           getattr(request, request_type), request.instruction_id)\r\n    416     else:\r\n    417       raise NotImplementedError\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/worker\/sdk_worker.py in process_bundle(self, request, instruction_id)\r\n    448         with self.maybe_profile(instruction_id):\r\n    449           delayed_applications, requests_finalization = (\r\n--> 450               bundle_processor.process_bundle(instruction_id))\r\n    451           monitoring_infos = bundle_processor.monitoring_infos()\r\n    452           monitoring_infos.extend(self.state_cache_metrics_fn())\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/worker\/bundle_processor.py in process_bundle(self, instruction_id)\r\n    837         for data in data_channel.input_elements(instruction_id,\r\n    838                                                 expected_transforms):\r\n--> 839           input_op_by_transform_id[data.transform_id].process_encoded(data.data)\r\n    840 \r\n    841       # Finish all operations.\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/worker\/bundle_processor.py in process_encoded(self, encoded_windowed_values)\r\n    214       decoded_value = self.windowed_coder_impl.decode_from_stream(\r\n    215           input_stream, True)\r\n--> 216       self.output(decoded_value)\r\n    217 \r\n    218   def try_split(self, fraction_of_remainder, total_buffer_size):\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/worker\/operations.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.worker.operations.Operation.output()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/worker\/operations.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.worker.operations.Operation.output()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/worker\/operations.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.worker.operations.SingletonConsumerSet.receive()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/worker\/operations.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.worker.operations.DoOperation.process()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/worker\/operations.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.worker.operations.DoOperation.process()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/common.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.common.DoFnRunner.process()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/common.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.common.DoFnRunner._reraise_augmented()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/future\/utils\/__init__.py in raise_with_traceback(exc, traceback)\r\n    417         if traceback == Ellipsis:\r\n    418             _, _, traceback = sys.exc_info()\r\n--> 419         raise exc.with_traceback(traceback)\r\n    420 \r\n    421 else:\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/common.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.common.DoFnRunner.process()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/common.cpython-36m-x86_64-linux-gnu.so in 
apache_beam.runners.common.PerWindowInvoker.invoke_process()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/runners\/common.cpython-36m-x86_64-linux-gnu.so in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window()\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/io\/iobase.py in process(self, element, init_result)\r\n   1080     for e in bundle[1]:  # values\r\n   1081       writer.write(e)\r\n-> 1082     return [window.TimestampedValue(writer.close(), timestamp.MAX_TIMESTAMP)]\r\n   1083 \r\n   1084 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/io\/filebasedsink.py in close(self)\r\n    421 \r\n    422   def close(self):\r\n--> 423     self.sink.close(self.temp_handle)\r\n    424     return self.temp_shard_path\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/io\/parquetio.py in close(self, writer)\r\n    536   def close(self, writer):\r\n    537     if len(self._buffer[0]) > 0:\r\n--> 538       self._flush_buffer()\r\n    539     if self._record_batches_byte_size > 0:\r\n    540       self._write_batches(writer)\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/apache_beam\/io\/parquetio.py in _flush_buffer(self)\r\n    568     for x in arrays:\r\n    569       for b in x.buffers():\r\n--> 570         size = size + b.size\r\n    571     self._record_batches_byte_size = self._record_batches_byte_size + size\r\n\r\nAttributeError: 'NoneType' object has no attribute 'size' [while running 'train\/Save to parquet\/Write\/WriteImpl\/WriteBundles']\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/187\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/187\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/186","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/186\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/186\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/186\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/186","id":623595180,"node_id":"MDU6SXNzdWU2MjM1OTUxODA=","number":186,"title":"Weird-ish: Not creating unique caches for different phases","user":{"login":"zphang","id":1668462,"node_id":"MDQ6VXNlcjE2Njg0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1668462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zphang","html_url":"https:\/\/github.com\/zphang","followers_url":"https:\/\/api.github.com\/users\/zphang\/followers","following_url":"https:\/\/api.github.com\/users\/zphang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zphang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zphang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zphang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zphang\/orgs","repos_url":"https:\/\/api.github.com\/users\/zphang\/repos","events_url":"https:\/\/api.github.com\/users\/zphang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zphang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-23T06:40:58Z","updated_at":"2020-05-23T20:22:18Z","closed_at":"2020-05-23T20:22:17Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Sample code:\r\n\r\n```python\r\nimport nlp\r\ndataset = nlp.load_dataset('boolq')\r\n\r\ndef func1(x):\r\n    return x\r\n\r\ndef func2(x):\r\n    return None\r\n\r\ntrain_output = dataset[\"train\"].map(func1)\r\nvalid_output = dataset[\"validation\"].map(func1)\r\nprint()\r\nprint(len(train_output), len(valid_output))\r\n# Output: 9427 9427\r\n```\r\n\r\nThe map method in both cases seem to be pointing to the same cache, so the latter call based on the validation data will return the processed train data cache.\r\n\r\nWhat's weird is that the following doesn't seem to be an issue:\r\n\r\n```python\r\ntrain_output = dataset[\"train\"].map(func2)\r\nvalid_output = dataset[\"validation\"].map(func2)\r\nprint()\r\nprint(len(train_output), len(valid_output))\r\n# 9427 3270\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/186\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/186\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/185","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/185\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/185\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/185\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/185","id":623172484,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIxODkxNjY2","number":185,"title":"[Commands] In-detail instructions to create dummy data folder","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-22T12:26:25Z","updated_at":"2020-05-22T14:06:35Z","closed_at":"2020-05-22T14:06:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/185","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/185","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/185.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/185.patch","merged_at":"2020-05-22T14:06:34Z"},"body":"### Dummy data command \r\n\r\nThis PR adds a new command `python nlp-cli dummy_data ` that gives in-detail instructions on how to add the dummy data files. \r\n\r\nIt would be great if you can try it out by moving the current dummy_data folder of any dataset in `.\/datasets` with `mv datasets\/\/dummy_data datasets\/\/dummy_data_copy` and running the command `python nlp-cli dummy_data .\/datasets\/` to see if you like the instructions. \r\n\r\n### CONTRIBUTING.md\r\nAlso the CONTRIBUTING.md is made cleaner including a new section on \"How to add a dataset\". \r\n\r\n### Current PRs \r\nIt would be nice if we can try out if this command helps current PRs, *e.g.* #169  to add a dataset. I comment on those PRs.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/185\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/185\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/184","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/184\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/184\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/184\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/184","id":623120929,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIxODQ5MTQ3","number":184,"title":"Use IndexError instead of ValueError when index out of range","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-22T10:43:42Z","updated_at":"2020-05-28T08:31:18Z","closed_at":"2020-05-28T08:31:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/184","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/184","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/184.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/184.patch","merged_at":"2020-05-28T08:31:18Z"},"body":"**`default __iter__ needs IndexError`**.\r\n\r\nWhen I want to create a wrapper of arrow dataset to adapt to fastai,\r\nI don't know how to initialize it, so I didn't use inheritance but use object composition.\r\nI wrote sth like this.\r\n```\r\nclas HF_dataset():\r\n  def __init__(self, arrow_dataset):\r\n    self.dset = arrow_dataset\r\n  def __getitem__(self, i):\r\n    return self.my_get_item(self.dset)\r\n```\r\nBut `for sample in my_dataset:` gave me `ValueError(f\"Index ({key}) outside of table length ({self._data.num_rows}).\")` . This is because default `__iter__` will stop when it catched `IndexError`.\r\n\r\nYou can also see my [work](https:\/\/github.com\/richardyy1188\/Pretrain-MLM-and-finetune-on-GLUE-with-fastai\/blob\/master\/GLUE_with_fastai.ipynb) that uses fastai2 to show\/load batches from huggingface\/nlp GLUE datasets\r\n\r\nSo I hope we can use `IndexError` instead to let other people who want to wrap it for any purpose won't be caught by this caveat.\r\n\r\nBTW, I super appreciate your work, both transformers and nlp save my life. 
\ud83d\udc96\ud83d\udc96\ud83d\udc96\ud83d\udc96\ud83d\udc96\ud83d\udc96\ud83d\udc96\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/184\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/184\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/183","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/183\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/183\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/183\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/183","id":623054270,"node_id":"MDU6SXNzdWU2MjMwNTQyNzA=","number":183,"title":"[Bug] labels of glue\/ax are all -1 ","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com
\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2020-05-22T08:43:36Z","updated_at":"2020-05-22T22:14:05Z","closed_at":"2020-05-22T22:14:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"```\r\nax = nlp.load_dataset('glue', 'ax')\r\nfor i in range(30): print(ax['test'][i]['label'], end=', ')\r\n```\r\n```\r\n-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, \r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/183\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/183\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/182","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/182\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/182\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/182\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/182","id":622646770,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIxNDcxMjg4","number":182,"title":"Update newsroom.py","user":{"login":"yoavartzi","id":3289873,"node_id":"MDQ6VXNlcjMyODk4NzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3289873?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yoavartzi","html_url":"https:\/\/github.com\/yoavartzi","followers_url":"https:\/\/api.github.com\/users\/yoavartzi\/followers","following_url":"https:\/\/api.github.com\/users\/yoavartzi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yoavartzi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yoavartzi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yoavartzi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yoavartzi\/orgs","repos_url":"https:\/\/api.github.com\/users\/yoavartzi\/repos","events_url":"https:\/\/api.github.com\/users\/yoavartzi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yoavartzi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\
/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2020-05-21T17:07:43Z","updated_at":"2020-05-22T16:38:23Z","closed_at":"2020-05-22T16:38:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/182","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/182","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/182.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/182.patch","merged_at":"2020-05-22T16:38:23Z"},"body":"Updated the URL for Newsroom download so it's more robust to future changes.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/182\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/182\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/181","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/181\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/181\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/181\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/181","id":622634420,"node_id":"MDU6SXNzdWU2MjI2MzQ0MjA=","number":181,"title":"Cannot upload my own dataset","user":{"login":"korakot","id":3155646,"node_id":"MDQ6VXNlcjMxNTU2NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3155646?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/korakot","html_url":"https:\/\/github.com\/korakot","followers_url":"https:\/\/api.github.com\/users\/korakot\/followers","following_url":"https:\/\/api.github.com\/users\/korakot\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/korakot\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/korakot\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/korakot\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/korakot\/orgs","repos_url":"https:\/\/api.github.com\/users\/korakot\/repos","events_url":"https:\/\/api.github.com\/users\/korakot\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/korakot\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-05-21T16:45:52Z","updated_at":"2020-06-18T22:14:42Z","closed_at":"2020-06-18T22:14:42Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I look into `nlp-cli` and `user.py` to learn how to upload my own data.\r\n\r\nIt is supposed to work like this\r\n- Register to get username, password at huggingface.co\r\n- `nlp-cli login` and type username, passworld\r\n- I have a single file to upload at `.\/ttc\/ttc_freq_extra.csv`\r\n- `nlp-cli upload ttc\/ttc_freq_extra.csv`\r\n\r\nBut I got this error.\r\n\r\n```\r\n2020-05-21 16:33:52.722464: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:44] Successfully opened dynamic library libcudart.so.10.1\r\nAbout to upload file \/content\/ttc\/ttc_freq_extra.csv to S3 under filename ttc\/ttc_freq_extra.csv and namespace korakot\r\nProceed? [Y\/n] y\r\nUploading... 
This might take a while if files are large\r\nTraceback (most recent call last):\r\n  File \"\/usr\/local\/bin\/nlp-cli\", line 33, in \r\n    service.run()\r\n  File \"\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/commands\/user.py\", line 234, in run\r\n    token=token, filename=filename, filepath=filepath, organization=self.args.organization\r\n  File \"\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/hf_api.py\", line 141, in presign_and_upload\r\n    urls = self.presign(token, filename=filename, organization=organization)\r\n  File \"\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/hf_api.py\", line 132, in presign\r\n    return PresignedUrl(**d)\r\nTypeError: __init__() got an unexpected keyword argument 'cdn'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/181\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/181\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/180","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/180\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/180\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/180\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/180","id":622556861,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIxMzk5Nzg2","number":180,"title":"Add hall of fame","user":{"login":"clmnt","id":821155,"node_id":"MDQ6VXNlcjgyMTE1NQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/821155?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/clmnt","html_url":"https:\/\/github.com\/clmnt","followers_url":"https:\/\/api.github.com\/users\/clmnt\/followers","following_url":"https:\/\/api.github.com\/users\/clmnt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/clmnt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/clmnt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/clmnt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/clmnt\/orgs","repos_url":"https:\/\/api.github.com\/users\/clmnt\/repos","events_url":"https:\/\/api.github.com\/users\/clmnt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/clmnt\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-21T14:53:48Z","updated_at":"2020-05-22T16:35:16Z","closed_at":"2020-05-22T16:35:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/180","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/180","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/180.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/180.patch","merged_at":"2020-05-22T16:35:14Z"},"body":"powered by https:\/\/github.com\/sourcerer-io\/hall-of-fame","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/180\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/180\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/179","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/179\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/179\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/179\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/179","id":622525410,"node_id":"MDU6SXNzdWU2MjI1MjU0MTA=","number":179,"title":"[Feature request] separate split name and split instructions","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/receiv
ed_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2020-05-21T14:10:51Z","updated_at":"2020-05-22T13:31:08Z","closed_at":"2020-05-22T13:31:07Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Currently, the name of an nlp.NamedSplit is parsed in arrow_reader.py and used as the instruction.\r\n\r\nThis makes it impossible to have several training sets, which can occur when:\r\n- A dataset corresponds to a collection of sub-datasets\r\n- A dataset was built in stages, adding new examples at each stage\r\n\r\nWould it be possible to have two separate fields in the Split class, a name \/instruction and a unique ID that is used as the key in the builder's split_dict ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/179\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/179\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/178","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/178\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/178\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/178\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/178","id":621979849,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIwOTMyMDI5","number":178,"title":"[Manual data] improve error message for manual data in general","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-20T18:10:45Z","updated_at":"2020-05-20T18:18:52Z","closed_at":"2020-05-20T18:18:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/178","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/178","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/178.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/178.patch","merged_at":"2020-05-20T18:18:50Z"},"body":"`nlp.load(\"xsum\")` now leads to the following error message:\r\n\r\n![Screenshot from 2020-05-20 20-05-28](https:\/\/user-images.githubusercontent.com\/23423619\/82481825-3587ea00-9ad6-11ea-9ca2-5794252c6ac7.png)\r\n\r\nI guess the manual download instructions for `xsum` can also be improved.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/178\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/178\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/177","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/177\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/177\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/177\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/177","id":621975368,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIwOTI4MzE0","number":177,"title":"Xsum manual download instruction","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-20T18:02:41Z","updated_at":"2020-05-20T18:16:50Z","closed_at":"2020-05-20T18:16:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/177","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/177","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/177.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/177.patch","merged_at":"2020-05-20T18:16:49Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/177\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/177\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/176","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/176\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/176\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/176\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/176","id":621934638,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIwODkzNDky","number":176,"title":"[Tests] Refactor MockDownloadManager","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-20T17:07:36Z","updated_at":"2020-05-20T18:17:19Z","closed_at":"2020-05-20T18:17:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/176","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/176","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/176.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/176.patch","merged_at":"2020-05-20T18:17:18Z"},"body":"Clean mock download manager class. \r\nThe print function was not of much help I think. \r\nWe should think about adding a command that creates the dummy folder structure for the user.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/176\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/176\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/175","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/175\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/175\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/175\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/175","id":621929428,"node_id":"MDU6SXNzdWU2MjE5Mjk0Mjg=","number":175,"title":"[Manual data dir] Error message: nlp.load_dataset('xsum') -> TypeError","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-20T17:00:32Z","updated_at":"2020-05-20T18:18:50Z","closed_at":"2020-05-20T18:18:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"v 0.1.0 from pip\r\n\r\n```python\r\nimport nlp\r\nxsum = nlp.load_dataset('xsum')\r\n```\r\n\r\nIssue is `dl_manager.manual_dir`is `None`\r\n\r\n```python\r\n\r\n---------------------------------------------------------------------------\r\nTypeError                                 Traceback (most recent call last)\r\n in \r\n----> 1 xsum = nlp.load_dataset('xsum')\r\n\r\n~\/miniconda3\/envs\/nb\/lib\/python3.7\/site-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n    515         download_mode=download_mode,\r\n    516         ignore_verifications=ignore_verifications,\r\n--> 517         save_infos=save_infos,\r\n    518     )\r\n    519 \r\n\r\n~\/miniconda3\/envs\/nb\/lib\/python3.7\/site-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, dl_manager, **download_and_prepare_kwargs)\r\n    361                 verify_infos = not save_infos and not ignore_verifications\r\n    362                 self._download_and_prepare(\r\n--> 363                     dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n    364                 )\r\n    365                 # Sync info\r\n\r\n~\/miniconda3\/envs\/nb\/lib\/python3.7\/site-packages\/nlp\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n    397         split_dict = SplitDict(dataset_name=self.name)\r\n    398         split_generators_kwargs = 
self._make_split_generators_kwargs(prepare_split_kwargs)\r\n--> 399         split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n    400         # Checksums verification\r\n    401         if verify_infos:\r\n\r\n~\/miniconda3\/envs\/nb\/lib\/python3.7\/site-packages\/nlp\/datasets\/xsum\/5c5fca23aaaa469b7a1c6f095cf12f90d7ab99bcc0d86f689a74fd62634a1472\/xsum.py in _split_generators(self, dl_manager)\r\n    102         with open(dl_path, \"r\") as json_file:\r\n    103             split_ids = json.load(json_file)\r\n--> 104         downloaded_path = os.path.join(dl_manager.manual_dir, \"xsum-extracts-from-downloads\")\r\n    105         return [\r\n    106             nlp.SplitGenerator(\r\n\r\n~\/miniconda3\/envs\/nb\/lib\/python3.7\/posixpath.py in join(a, *p)\r\n     78     will be discarded.  An empty last part will result in a path that\r\n     79     ends with a separator.\"\"\"\r\n---> 80     a = os.fspath(a)\r\n     81     sep = _get_sep(a)\r\n     82     path = a\r\n\r\nTypeError: expected str, bytes or os.PathLike object, not NoneType\r\n\r\n\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/175\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/175\/timeline","performed_via_github_app":null}
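Editor's note: the traceback in issue #175 shows that `dl_manager.manual_dir` is `None` because no manual data directory was supplied; the `data_dir` argument visible in the `load_dataset` signature above is what ends up there. A hedged workaround sketch follows; the local path is a placeholder.

```python
import nlp

# xsum requires manually downloaded data; data_dir (a hypothetical local path here)
# is forwarded to the download manager as manual_dir
xsum = nlp.load_dataset("xsum", data_dir="/path/to/xsum_manual_downloads")
```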
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/174","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/174\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/174\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/174\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/174","id":621928403,"node_id":"MDU6SXNzdWU2MjE5Mjg0MDM=","number":174,"title":"nlp.load_dataset('xsum') -> TypeError","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-20T16:59:09Z","updated_at":"2020-05-20T17:43:46Z","closed_at":"2020-05-20T17:43:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/174\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/174\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/173","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/173\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/173\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/173\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/173","id":621764932,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIwNzUyNzQy","number":173,"title":"Rm extracted test dirs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-20T13:30:48Z","updated_at":"2020-05-22T16:34:36Z","closed_at":"2020-05-22T16:34:35Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/173","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/173","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/173.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/173.patch","merged_at":"2020-05-22T16:34:35Z"},"body":"All the dummy data used for tests were duplicated. For each dataset, we had one zip file but also its extracted directory. I removed all these directories\r\n\r\nFurthermore instead of extracting next to the dummy_data.zip file, we extract in the temp `cached_dir` used for tests, so that all the extracted directories get removed after testing.\r\n\r\nFinally there was a bug in the `mock_download_manager` that would let it create directories with invalid names, as in #172. I fixed that by encoding url arguments. I had to rename the dummy data for `scientific_papers` and `cnn_dailymail` (the aws tests don't pass for those 2 in this PR, but they will once aws will be synced, as the local ones do)\r\n\r\nLet me know if it sounds good to you @patrickvonplaten . I'm still not entirely familiar with the mock downloader","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/173\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/173\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/172","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/172\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/172\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/172\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/172","id":621377386,"node_id":"MDU6SXNzdWU2MjEzNzczODY=","number":172,"title":"Clone not working on Windows environment","user":{"login":"codehunk628","id":51091425,"node_id":"MDQ6VXNlcjUxMDkxNDI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/51091425?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/codehunk628","html_url":"https:\/\/github.com\/codehunk628","followers_url":"https:\/\/api.github.com\/users\/codehunk628\/followers","following_url":"https:\/\/api.github.com\/users\/codehunk628\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/codehunk628\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/codehunk628\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/codehunk628\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/codehunk628\/orgs","repos_url":"https:\/\/api.github.com\/users\/codehunk628\/repos","events_url":"https:\/\/api.github.com\/users\/codehunk628\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/codehunk628\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\
/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2020-05-20T00:45:14Z","updated_at":"2020-05-23T12:49:13Z","closed_at":"2020-05-23T11:27:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Cloning in a windows environment is not working because of use of special character '?' in folder name ..\r\nPlease consider changing the folder name ....\r\nReference to folder -\r\nnlp\/datasets\/cnn_dailymail\/dummy\/3.0.0\/3.0.0\/dummy_data-zip-extracted\/dummy_data\/uc?export=download&id=0BwmD_VLjROrfM1BxdkxVaTY2bWs\/dailymail\/stories\/\r\n\r\nerror log:\r\nfatal: cannot create directory at 'datasets\/cnn_dailymail\/dummy\/3.0.0\/3.0.0\/dummy_data-zip-extracted\/dummy_data\/uc?export=download&id=0BwmD_VLjROrfM1BxdkxVaTY2bWs': Invalid argument\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/172\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/172\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/171","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/171\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/171\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/171\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/171","id":621199128,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIwMjk0ODM0","number":171,"title":"fix squad metric format","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-05-19T18:37:36Z","updated_at":"2020-05-22T13:36:50Z","closed_at":"2020-05-22T13:36:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/171","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/171","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/171.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/171.patch","merged_at":"2020-05-22T13:36:48Z"},"body":"The format of the squad metric was wrong.\r\nThis should fix #143 \r\n\r\nI tested with\r\n```python3\r\npredictions = [\r\n    {'id': '56be4db0acb8001400a502ec', 'prediction_text': 'Denver Broncos'}\r\n]\r\nreferences = [\r\n    {'answers': [{'text': 'Denver Broncos'}], 'id': '56be4db0acb8001400a502ec'}\r\n]\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/171\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/171\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/170","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/170\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/170\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/170\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/170","id":621119747,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIwMjMwMDIx","number":170,"title":"Rename anli dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-19T16:26:57Z","updated_at":"2020-05-20T12:23:09Z","closed_at":"2020-05-20T12:23:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/170","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/170","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/170.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/170.patch","merged_at":"2020-05-20T12:23:07Z"},"body":"What we have now as the `anli` dataset is actually the \u03b1NLI dataset from the ART challenge dataset. This name is confusing because `anli` is also the name of adversarial NLI (see [https:\/\/github.com\/facebookresearch\/anli](https:\/\/github.com\/facebookresearch\/anli)).\r\n\r\nI renamed the current `anli` dataset by `art`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/170\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/170\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/169","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/169\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/169\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/169\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/169","id":621099682,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIwMjE1NDkw","number":169,"title":"Adding Qanta (Quizbowl) Dataset","user":{"login":"EntilZha","id":1382460,"node_id":"MDQ6VXNlcjEzODI0NjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1382460?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/EntilZha","html_url":"https:\/\/github.com\/EntilZha","followers_url":"https:\/\/api.github.com\/users\/EntilZha\/followers","following_url":"https:\/\/api.github.com\/users\/EntilZha\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/EntilZha\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/EntilZha\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/EntilZha\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/EntilZha\/orgs","repos_url":"https:\/\/api.github.com\/users\/EntilZha\/repos","events_url":"https:\/\/api.github.com\/users\/EntilZha\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/EntilZha\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users
\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2020-05-19T16:03:01Z","updated_at":"2020-05-26T12:52:31Z","closed_at":"2020-05-26T12:52:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/169","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/169","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/169.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/169.patch","merged_at":null},"body":"This PR adds the qanta question answering datasets from [Quizbowl: The Case for Incremental Question Answering](https:\/\/arxiv.org\/abs\/1904.04792) and [Trick Me If You Can: Human-in-the-loop Generation of Adversarial Question Answering Examples](https:\/\/www.aclweb.org\/anthology\/Q19-1029\/) (adversarial fold)\r\n\r\nThis partially continues a discussion around fixing dummy data from https:\/\/github.com\/huggingface\/nlp\/issues\/161\r\n\r\nI ran the following code to double check that it works and did some sanity checks on the output. The majority of the code itself is from our `allennlp` version of the dataset reader.\r\n\r\n```python\r\nimport nlp\r\n# Default is full question\r\ndata = nlp.load_dataset('.\/datasets\/qanta') \r\n# Four configs\r\n# Primarily useful for training\r\ndata = nlp.load_dataset('.\/datasets\/qanta', 'mode=sentences,char_skip=25')  \r\n# Primarily used in evaluation\r\ndata = nlp.load_dataset('.\/datasets\/qanta', 'mode=first,char_skip=25')  \r\ndata = nlp.load_dataset('.\/datasets\/qanta', 'mode=full,char_skip=25')  \r\n# Primarily useful in evaluation and \"live\" play\r\ndata = nlp.load_dataset('.\/datasets\/qanta', 'mode=runs,char_skip=25')  \r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/169\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/169\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/168","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/168\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/168\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/168\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/168","id":620959819,"node_id":"MDU6SXNzdWU2MjA5NTk4MTk=","number":168,"title":"Loading 'wikitext' dataset fails","user":{"login":"itay1itzhak","id":25987633,"node_id":"MDQ6VXNlcjI1OTg3NjMz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25987633?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/itay1itzhak","html_url":"https:\/\/github.com\/itay1itzhak","followers_url":"https:\/\/api.github.com\/users\/itay1itzhak\/followers","following_url":"https:\/\/api.github.com\/users\/itay1itzhak\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/itay1itzhak\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/itay1itzhak\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/itay1itzhak\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/itay1itzhak\/orgs","repos_url":"https:\/\/api.github.com\/users\/itay1itzhak\/repos","events_url":"https:\/\/api.github.com\/users\/itay1itzhak\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/itay1itzhak\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-05-19T13:04:29Z","updated_at":"2020-05-26T21:46:52Z","closed_at":"2020-05-26T21:46:52Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Loading the 'wikitext' dataset fails with Attribute error:\r\n\r\nCode to reproduce (From example notebook):\r\n\r\nimport nlp\r\nwikitext_dataset = nlp.load_dataset('wikitext')\r\n\r\n\r\nError:\r\n---------------------------------------------------------------------------\r\nAttributeError                            Traceback (most recent call last)\r\n in ()\r\n     11 \r\n     12 # Load a dataset and print the first examples in the training set\r\n---> 13 wikitext_dataset = nlp.load_dataset('wikitext')\r\n     14 print(wikitext_dataset['train'][0])\r\n\r\n6 frames\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n    518         download_mode=download_mode,\r\n    519         ignore_verifications=ignore_verifications,\r\n--> 520         save_infos=save_infos,\r\n    521     )\r\n    522 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, dl_manager, **download_and_prepare_kwargs)\r\n    363                 verify_infos = not save_infos and not ignore_verifications\r\n    364                 self._download_and_prepare(\r\n--> 365                     dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n    366                 )\r\n    367                 # Sync info\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in _download_and_prepare(self, dl_manager, 
verify_infos, **prepare_split_kwargs)\r\n    416             try:\r\n    417                 # Prepare split will record examples associated to the split\r\n--> 418                 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n    419             except OSError:\r\n    420                 raise OSError(\"Cannot find data file. \" + (self.MANUAL_DOWNLOAD_INSTRUCTIONS or \"\"))\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in _prepare_split(self, split_generator)\r\n    594             example = self.info.features.encode_example(record)\r\n    595             writer.write(example)\r\n--> 596         num_examples, num_bytes = writer.finalize()\r\n    597 \r\n    598         assert num_examples == num_examples, f\"Expected to write {split_info.num_examples} but wrote {num_examples}\"\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/arrow_writer.py in finalize(self, close_stream)\r\n    173     def finalize(self, close_stream=True):\r\n    174         if self.pa_writer is not None:\r\n--> 175             self.write_on_file()\r\n    176             self.pa_writer.close()\r\n    177         if close_stream:\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/arrow_writer.py in write_on_file(self)\r\n    124             else:\r\n    125                 # All good\r\n--> 126                 self._write_array_on_file(pa_array)\r\n    127             self.current_rows = []\r\n    128 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/arrow_writer.py in _write_array_on_file(self, pa_array)\r\n     93     def _write_array_on_file(self, pa_array):\r\n     94         \"\"\"Write a PyArrow Array\"\"\"\r\n---> 95         pa_batch = pa.RecordBatch.from_struct_array(pa_array)\r\n     96         self._num_bytes += pa_array.nbytes\r\n     97         self.pa_writer.write_batch(pa_batch)\r\n\r\nAttributeError: type object 'pyarrow.lib.RecordBatch' has no attribute 'from_struct_array'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/168\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/168\/timeline","performed_via_github_app":null}
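Editor's note: the `AttributeError` in issue #168 points at a pyarrow build that does not expose `RecordBatch.from_struct_array`, which usually means the installed pyarrow version does not match what the nlp library expects. A minimal environment check, assuming upgrading pyarrow is acceptable:

```python
import pyarrow as pa

print(pa.__version__)
# the traceback relies on pa.RecordBatch.from_struct_array; verify it is available
print(hasattr(pa.RecordBatch, "from_struct_array"))
# if it is missing, upgrading pyarrow in the environment is the usual first step:
#   pip install --upgrade pyarrow
```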
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/167","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/167\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/167\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/167\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/167","id":620908786,"node_id":"MDExOlB1bGxSZXF1ZXN0NDIwMDY0NDMw","number":167,"title":"[Tests] refactor tests","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-19T11:43:32Z","updated_at":"2020-05-19T16:17:12Z","closed_at":"2020-05-19T16:17:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/167","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/167","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/167.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/167.patch","merged_at":"2020-05-19T16:17:10Z"},"body":"This PR separates AWS and Local tests to remove these ugly statements in the script:\r\n```python\r\n        if \"\/\" not in dataset_name:\r\n            logging.info(\"Skip {} because it is a canonical dataset\")\r\n            return\r\n```\r\n\r\nTo run a `aws` test, one should now run the following command: \r\n\r\n```python \r\npytest -s tests\/test_dataset_common.py::AWSDatasetTest::test_builder_class_wmt14\r\n```\r\n\r\nThe same `local` test, can be run with:\r\n```python \r\npytest -s tests\/test_dataset_common.py::LocalDatasetTest::test_builder_class_wmt14\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/167\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/167\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/166","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/166\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/166\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/166\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/166","id":620850218,"node_id":"MDU6SXNzdWU2MjA4NTAyMTg=","number":166,"title":"Add a method to shuffle a dataset","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-05-19T10:08:46Z","updated_at":"2020-06-23T15:07:33Z","closed_at":"2020-06-23T15:07:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Could maybe be a `dataset.shuffle(generator=None, seed=None)` signature method.\r\n\r\nAlso, we could maybe have a clear indication of which method modify in-place and which methods return\/cache a modified dataset. I kinda like torch conversion of having an underscore suffix for all the methods which modify a dataset in-place. What do you think?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/166\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/166\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/165","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/165\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/165\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/165\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/165","id":620758221,"node_id":"MDU6SXNzdWU2MjA3NTgyMjE=","number":165,"title":"ANLI","user":{"login":"douwekiela","id":6024930,"node_id":"MDQ6VXNlcjYwMjQ5MzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6024930?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/douwekiela","html_url":"https:\/\/github.com\/douwekiela","followers_url":"https:\/\/api.github.com\/users\/douwekiela\/followers","following_url":"https:\/\/api.github.com\/users\/douwekiela\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/douwekiela\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/douwekiela\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/douwekiela\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/douwekiela\/orgs","repos_url":"https:\/\/api.github.com\/users\/douwekiela\/repos","events_url":"https:\/\/api.github.com\/users\/douwekiela\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/douwekiela\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-19T07:50:57Z","updated_at":"2020-05-20T12:23:07Z","closed_at":"2020-05-20T12:23:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Can I recommend the following:\r\n\r\nFor ANLI, use https:\/\/github.com\/facebookresearch\/anli. As that paper says, \"Our dataset is not\r\nto be confused with abductive NLI (Bhagavatula et al., 2019), which calls itself \u03b1NLI, or ART.\". \r\n\r\nIndeed, the paper cited under what is currently called anli says in the abstract \"We introduce a challenge dataset, ART\".\r\n\r\nThe current naming will confuse people :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/165\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/165\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/164","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/164\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/164\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/164\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/164","id":620540250,"node_id":"MDU6SXNzdWU2MjA1NDAyNTA=","number":164,"title":"Add Spanish POR and NER Datasets","user":{"login":"mrm8488","id":3653789,"node_id":"MDQ6VXNlcjM2NTM3ODk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3653789?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mrm8488","html_url":"https:\/\/github.com\/mrm8488","followers_url":"https:\/\/api.github.com\/users\/mrm8488\/followers","following_url":"https:\/\/api.github.com\/users\/mrm8488\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mrm8488\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mrm8488\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mrm8488\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mrm8488\/orgs","repos_url":"https:\/\/api.github.com\/users\/mrm8488\/repos","events_url":"https:\/\/api.github.com\/users\/mrm8488\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mrm8488\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-18T22:18:21Z","updated_at":"2020-05-25T16:28:45Z","closed_at":"2020-05-25T16:28:45Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi guys,\r\nIn order to cover multilingual support a little step could be adding standard Datasets used for Spanish NER and POS tasks.\r\nI can provide it in raw and preprocessed formats.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/164\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/164\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/163","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/163\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/163\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/163\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/163","id":620534307,"node_id":"MDU6SXNzdWU2MjA1MzQzMDc=","number":163,"title":"[Feature request] Add cos-e v1.0","user":{"login":"sarahwie","id":8027676,"node_id":"MDQ6VXNlcjgwMjc2NzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8027676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sarahwie","html_url":"https:\/\/github.com\/sarahwie","followers_url":"https:\/\/api.github.com\/users\/sarahwie\/followers","following_url":"https:\/\/api.github.com\/users\/sarahwie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sarahwie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sarahwie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sarahwie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sarahwie\/orgs","repos_url":"https:\/\/api.github.com\/users\/sarahwie\/repos","events_url":"https:\/\/api.github.com\/users\/sarahwie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sarahwie\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-05-18T22:05:26Z","updated_at":"2020-06-16T23:15:25Z","closed_at":"2020-06-16T18:52:06Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I noticed the second release of cos-e (v1.11) is included in this repo. I wanted to request inclusion of v1.0, since this is the version on which results are reported on in [the paper](https:\/\/www.aclweb.org\/anthology\/P19-1487\/), and v1.11 has noted [annotation](https:\/\/github.com\/salesforce\/cos-e\/issues\/2) [issues](https:\/\/arxiv.org\/pdf\/2004.14546.pdf).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/163\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/163\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/162","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/162\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/162\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/162\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/162","id":620513554,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE5NzQ4Mzky","number":162,"title":"fix prev files hash in map","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-18T21:20:51Z","updated_at":"2020-05-18T21:36:21Z","closed_at":"2020-05-18T21:36:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/162","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/162","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/162.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/162.patch","merged_at":"2020-05-18T21:36:20Z"},"body":"Fix the `.map` issue in #160.\r\nThis makes sure it takes the previous files when computing the hash.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/162\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/162\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/161","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/161\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/161\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/161\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/161","id":620487535,"node_id":"MDU6SXNzdWU2MjA0ODc1MzU=","number":161,"title":"Discussion on version identifier & MockDataLoaderManager for test data","user":{"login":"EntilZha","id":1382460,"node_id":"MDQ6VXNlcjEzODI0NjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1382460?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/EntilZha","html_url":"https:\/\/github.com\/EntilZha","followers_url":"https:\/\/api.github.com\/users\/EntilZha\/followers","following_url":"https:\/\/api.github.com\/users\/EntilZha\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/EntilZha\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/EntilZha\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/EntilZha\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/EntilZha\/orgs","repos_url":"https:\/\/api.github.com\/users\/EntilZha\/repos","events_url":"https:\/\/api.github.com\/users\/EntilZha\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/EntilZha\/received_events","type":"User","site_admin":false},"labels":[{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the 
library"}],"state":"open","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":12,"created_at":"2020-05-18T20:31:30Z","updated_at":"2020-05-24T18:10:03Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hi, I'm working on adding a dataset and ran into an error due to `download` not being defined on `MockDataLoaderManager`, but being defined in `nlp\/utils\/download_manager.py`. The readme step running this: `RUN_SLOW=1 pytest tests\/test_dataset_common.py::DatasetTest::test_load_real_dataset_localmydatasetname` triggers the error. If I can get something to work, I can include it in my data PR once I'm done.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/161\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/161\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/160","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/160\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/160\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/160\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/160","id":620448236,"node_id":"MDU6SXNzdWU2MjA0NDgyMzY=","number":160,"title":"caching in map causes same result to be returned for train, validation and test","user":{"login":"dpressel","id":247881,"node_id":"MDQ6VXNlcjI0Nzg4MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/247881?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dpressel","html_url":"https:\/\/github.com\/dpressel","followers_url":"https:\/\/api.github.com\/users\/dpressel\/followers","following_url":"https:\/\/api.github.com\/users\/dpressel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dpressel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dpressel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dpressel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dpressel\/orgs","repos_url":"https:\/\/api.github.com\/users\/dpressel\/repos","events_url":"https:\/\/api.github.com\/users\/dpressel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dpressel\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2020-05-18T19:22:03Z","updated_at":"2020-05-18T21:36:20Z","closed_at":"2020-05-18T21:36:20Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"hello,\r\n\r\nI am working on a program that uses the `nlp` library with the `SST2` dataset.\r\n\r\nThe rough outline of the program is:\r\n\r\n```\r\nimport nlp as nlp_datasets\r\n...\r\nparser.add_argument('--dataset', help='HuggingFace Datasets id', default=['glue', 'sst2'], nargs='+')\r\n...\r\ndataset = nlp_datasets.load_dataset(*args.dataset)\r\n...\r\n# Create feature vocabs\r\nvocabs = create_vocabs(dataset.values(), vectorizers)\r\n...\r\n# Create a function to vectorize based on vectorizers and vocabs:\r\n\r\nprint('TS', train_set.num_rows)\r\nprint('VS', valid_set.num_rows)\r\nprint('ES', test_set.num_rows)\r\n\r\n# factory method to create a `convert_to_features` function based on vocabs\r\nconvert_to_features = create_featurizer(vectorizers, vocabs)\r\ntrain_set = train_set.map(convert_to_features, batched=True)\r\ntrain_set.set_format(type='torch', columns=list(vectorizers.keys()) + ['y', 'lengths'])\r\ntrain_loader = torch.utils.data.DataLoader(train_set, batch_size=args.batchsz)\r\n\r\nvalid_set = valid_set.map(convert_to_features, batched=True)\r\nvalid_set.set_format(type='torch', columns=list(vectorizers.keys()) + ['y', 'lengths'])\r\nvalid_loader = torch.utils.data.DataLoader(valid_set, batch_size=args.batchsz)\r\n\r\ntest_set = test_set.map(convert_to_features, 
batched=True)\r\ntest_set.set_format(type='torch', columns=list(vectorizers.keys()) + ['y', 'lengths'])\r\ntest_loader = torch.utils.data.DataLoader(test_set, batch_size=args.batchsz)\r\n\r\nprint('TS', train_set.num_rows)\r\nprint('VS', valid_set.num_rows)\r\nprint('ES', test_set.num_rows)\r\n\r\n```\r\nIm not sure if Im using it incorrectly, but the results are not what I expect.  Namely, the `.map()`  seems to grab the datset from the cache and then loses track of what the specific dataset is, instead using my training data for all datasets:\r\n\r\n```\r\nTS 67349\r\nVS 872\r\nES 1821\r\nTS 67349\r\nVS 67349\r\nES 67349\r\n```\r\n\r\nThe behavior changes if I turn off the caching but then the results fail:\r\n\r\n```\r\ntrain_set = train_set.map(convert_to_features, batched=True, load_from_cache_file=False)\r\n...\r\nvalid_set = valid_set.map(convert_to_features, batched=True, load_from_cache_file=False)\r\n...\r\ntest_set = test_set.map(convert_to_features, batched=True, load_from_cache_file=False)\r\n```\r\n\r\nNow I get the right set of features back...\r\n```\r\nTS 67349\r\nVS 872\r\nES 1821\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 68\/68 [00:00<00:00, 92.78it\/s]\r\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1\/1 [00:00<00:00, 75.47it\/s]\r\n  0%|          | 0\/2 [00:00\r\n    for x in train_loader:\r\n  File \"\/home\/dpressel\/anaconda3\/lib\/python3.7\/site-packages\/torch\/utils\/data\/dataloader.py\", line 345, in __next__\r\n    data = self._next_data()\r\n  File \"\/home\/dpressel\/anaconda3\/lib\/python3.7\/site-packages\/torch\/utils\/data\/dataloader.py\", line 385, in _next_data\r\n    data = self._dataset_fetcher.fetch(index)  # may raise StopIteration\r\n  File \"\/home\/dpressel\/anaconda3\/lib\/python3.7\/site-packages\/torch\/utils\/data\/_utils\/fetch.py\", line 44, in fetch\r\n    data = [self.dataset[idx] for idx in possibly_batched_index]\r\n  File \"\/home\/dpressel\/anaconda3\/lib\/python3.7\/site-packages\/torch\/utils\/data\/_utils\/fetch.py\", line 44, in \r\n    data = [self.dataset[idx] for idx in possibly_batched_index]\r\n  File \"\/home\/dpressel\/anaconda3\/lib\/python3.7\/site-packages\/nlp\/arrow_dataset.py\", line 338, in __getitem__\r\n    output_all_columns=self._output_all_columns,\r\n  File \"\/home\/dpressel\/anaconda3\/lib\/python3.7\/site-packages\/nlp\/arrow_dataset.py\", line 294, in _getitem\r\n    outputs = self._unnest(self._data.slice(key, 1).to_pydict())\r\n  File \"pyarrow\/table.pxi\", line 1211, in pyarrow.lib.Table.slice\r\n  File \"pyarrow\/public-api.pxi\", line 390, in pyarrow.lib.pyarrow_wrap_table\r\n  File \"pyarrow\/error.pxi\", line 85, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Column 3: In chunk 0: Invalid: Length spanned by list offsets (15859698) larger than values array (length 100000)\r\n\r\nProcess finished with exit code 1\r\n```\r\n\r\nThe full-example program (minus the print stmts) is here:\r\nhttps:\/\/github.com\/dpressel\/mead-baseline\/pull\/620\/files\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/160\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/160\/timeline","performed_via_github_app":null}
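A condensed sketch of the workaround quoted in issue #160 above: with the map cache disabled, each split keeps its own features (the later `ArrowInvalid` error in the report is a separate problem). The featurizer and the `sentence`/`lengths` columns below are placeholders, not the reporter's real code.

```python
# Sketch of the report's no-cache workaround, assuming the old `nlp` API.
import nlp

dataset = nlp.load_dataset("glue", "sst2")

def convert_to_features(batch):
    # placeholder featurizer: add a token-count column
    return {"lengths": [len(s.split()) for s in batch["sentence"]]}

splits = {
    name: split.map(convert_to_features, batched=True, load_from_cache_file=False)
    for name, split in dataset.items()
}
for name, split in splits.items():
    print(name, split.num_rows)  # expected for sst2: 67349 / 872 / 1821
```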
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/159","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/159\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/159\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/159\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/159","id":620420700,"node_id":"MDU6SXNzdWU2MjA0MjA3MDA=","number":159,"title":"How can we add more datasets to nlp library?","user":{"login":"Tahsin-Mayeesha","id":17886829,"node_id":"MDQ6VXNlcjE3ODg2ODI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17886829?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Tahsin-Mayeesha","html_url":"https:\/\/github.com\/Tahsin-Mayeesha","followers_url":"https:\/\/api.github.com\/users\/Tahsin-Mayeesha\/followers","following_url":"https:\/\/api.github.com\/users\/Tahsin-Mayeesha\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Tahsin-Mayeesha\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Tahsin-Mayeesha\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Tahsin-Mayeesha\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Tahsin-Mayeesha\/orgs","repos_url":"https:\/\/api.github.com\/users\/Tahsin-Mayeesha\/repos","events_url":"https:\/\/api.github.com\/users\/Tahsin-Mayeesha\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Tahsin-Mayeesha\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-18T18:35:31Z","updated_at":"2020-05-18T18:37:08Z","closed_at":"2020-05-18T18:37:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/159\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/159\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/158","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/158\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/158\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/158\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/158","id":620396658,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE5NjUyNTQy","number":158,"title":"add Toronto Books Corpus","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-18T17:54:45Z","updated_at":"2020-06-11T07:49:15Z","closed_at":"2020-05-19T07:34:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/158","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/158","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/158.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/158.patch","merged_at":null},"body":"This PR adds the Toronto Books Corpus.\r\n.\r\nIt on consider TMX and plain text files (Moses) defined in the table **Statistics and TMX\/Moses Downloads** [here](http:\/\/opus.nlpl.eu\/Books.php )","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/158\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/158\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/157","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/157\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/157\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/157\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/157","id":620356542,"node_id":"MDU6SXNzdWU2MjAzNTY1NDI=","number":157,"title":"nlp.load_dataset() gives \"TypeError: list_() takes exactly one argument (2 given)\"","user":{"login":"saahiluppal","id":47444392,"node_id":"MDQ6VXNlcjQ3NDQ0Mzky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47444392?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/saahiluppal","html_url":"https:\/\/github.com\/saahiluppal","followers_url":"https:\/\/api.github.com\/users\/saahiluppal\/followers","following_url":"https:\/\/api.github.com\/users\/saahiluppal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/saahiluppal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/saahiluppal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/saahiluppal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/saahiluppal\/orgs","repos_url":"https:\/\/api.github.com\/users\/saahiluppal\/repos","events_url":"https:\/\/api.github.com\/users\/saahiluppal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/saahiluppal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.gi
thub.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":11,"created_at":"2020-05-18T16:46:38Z","updated_at":"2020-06-05T08:08:58Z","closed_at":"2020-06-05T08:08:58Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to load datasets from nlp, but there seems to be an error saying\r\n\"TypeError: list_() takes exactly one argument (2 given)\"\r\n\r\nThe gist can be found here:\r\nhttps:\/\/gist.github.com\/saahiluppal\/c4b878f330b10b9ab9762bc0776c0a6a","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/157\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/157\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/156","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/156\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/156\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/156\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/156","id":620263687,"node_id":"MDU6SXNzdWU2MjAyNjM2ODc=","number":156,"title":"SyntaxError with WMT datasets","user":{"login":"tomhosking","id":9419158,"node_id":"MDQ6VXNlcjk0MTkxNTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9419158?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tomhosking","html_url":"https:\/\/github.com\/tomhosking","followers_url":"https:\/\/api.github.com\/users\/tomhosking\/followers","following_url":"https:\/\/api.github.com\/users\/tomhosking\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tomhosking\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tomhosking\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tomhosking\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tomhosking\/orgs","repos_url":"https:\/\/api.github.com\/users\/tomhosking\/repos","events_url":"https:\/\/api.github.com\/users\/tomhosking\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tomhosking\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.g
ithub.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2020-05-18T14:38:18Z","updated_at":"2020-07-23T16:41:55Z","closed_at":"2020-07-23T16:41:55Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The following snippet produces a syntax error:\r\n\r\n```\r\nimport nlp\r\n\r\ndataset = nlp.load_dataset('wmt14')\r\nprint(dataset['train'][0])\r\n```\r\n\r\n```\r\nTraceback (most recent call last):\r\n\r\n  File \"\/home\/tom\/.local\/lib\/python3.6\/site-packages\/IPython\/core\/interactiveshell.py\", line 3326, in run_code\r\n    exec(code_obj, self.user_global_ns, self.user_ns)\r\n\r\n  File \"\", line 3, in \r\n    dataset = nlp.load_dataset('wmt14')\r\n\r\n  File \"\/home\/tom\/.local\/lib\/python3.6\/site-packages\/nlp\/load.py\", line 505, in load_dataset\r\n    builder_cls = import_main_class(module_path, dataset=True)\r\n\r\n  File \"\/home\/tom\/.local\/lib\/python3.6\/site-packages\/nlp\/load.py\", line 56, in import_main_class\r\n    module = importlib.import_module(module_path)\r\n\r\n  File \"\/usr\/lib\/python3.6\/importlib\/__init__.py\", line 126, in import_module\r\n    return _bootstrap._gcd_import(name[level:], package, level)\r\n\r\n  File \"\", line 994, in _gcd_import\r\n\r\n  File \"\", line 971, in _find_and_load\r\n\r\n  File \"\", line 955, in _find_and_load_unlocked\r\n\r\n  File \"\", line 665, in _load_unlocked\r\n\r\n  File \"\", line 678, in exec_module\r\n\r\n  File \"\", line 219, in _call_with_frames_removed\r\n\r\n  File \"\/home\/tom\/.local\/lib\/python3.6\/site-packages\/nlp\/datasets\/wmt14\/c258d646f4f5870b0245f783b7aa0af85c7117e06aacf1e0340bd81935094de2\/wmt14.py\", line 21, in \r\n    from .wmt_utils import Wmt, WmtConfig\r\n\r\n  File \"\/home\/tom\/.local\/lib\/python3.6\/site-packages\/nlp\/datasets\/wmt14\/c258d646f4f5870b0245f783b7aa0af85c7117e06aacf1e0340bd81935094de2\/wmt_utils.py\", line 659\r\n    <<<<<<< HEAD\r\n     ^\r\nSyntaxError: invalid syntax\r\n```\r\n\r\nPython version:\r\n`3.6.9 (default, Apr 18 2020, 01:56:04)  [GCC 8.4.0]`\r\nRunning on Ubuntu 18.04, via a Jupyter notebook","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/156\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/156\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/155","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/155\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/155\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/155\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/155","id":620067946,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE5Mzg1ODM0","number":155,"title":"Include more links in README, fix typos","user":{"login":"Bharat123rox","id":13381361,"node_id":"MDQ6VXNlcjEzMzgxMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13381361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Bharat123rox","html_url":"https:\/\/github.com\/Bharat123rox","followers_url":"https:\/\/api.github.com\/users\/Bharat123rox\/followers","following_url":"https:\/\/api.github.com\/users\/Bharat123rox\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Bharat123rox\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Bharat123rox\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Bharat123rox\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Bharat123rox\/orgs","repos_url":"https:\/\/api.github.com\/users\/Bharat123rox\/repos","events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-18T09:47:08Z","updated_at":"2020-05-28T08:31:57Z","closed_at":"2020-05-28T08:31:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/155","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/155","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/155.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/155.patch","merged_at":"2020-05-28T08:31:57Z"},"body":"Include more links and fix typos in README","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/155\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/155\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/154","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/154\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/154\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/154\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/154","id":620059066,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE5Mzc4Mzgw","number":154,"title":"add Ubuntu Dialogs Corpus datasets","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-18T09:34:48Z","updated_at":"2020-05-18T10:12:28Z","closed_at":"2020-05-18T10:12:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/154","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/154","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/154.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/154.patch","merged_at":"2020-05-18T10:12:27Z"},"body":"This PR adds the Ubuntu Dialog Corpus datasets version 2.0. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/154\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/154\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/153","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/153\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/153\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/153\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/153","id":619972246,"node_id":"MDU6SXNzdWU2MTk5NzIyNDY=","number":153,"title":"Meta-datasets (GLUE\/XTREME\/...) \u2013 Special care to attributions and citations","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-05-18T07:24:22Z","updated_at":"2020-05-18T21:18:16Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Meta-datasets are interesting in terms of standardized benchmarks but they also have specific behaviors, in particular in terms of attribution and authorship. It's very important that each specific dataset inside a meta dataset is properly referenced and the citation\/specific homepage\/etc are very visible and accessible and not only the generic citation of the meta-dataset itself.\r\n\r\nLet's take GLUE as an example:\r\n\r\nThe configuration has the citation for each dataset included (e.g. [here](https:\/\/github.com\/huggingface\/nlp\/blob\/master\/datasets\/glue\/glue.py#L154-L161)) but it should be copied inside the dataset info so that, when people access `dataset.info.citation` they get both the citation for GLUE and the citation for the specific datasets inside GLUE that they have loaded.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/153\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/153\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/152","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/152\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/152\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/152\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/152","id":619971900,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE5MzA4OTE2","number":152,"title":"Add GLUE config name check","user":{"login":"Bharat123rox","id":13381361,"node_id":"MDQ6VXNlcjEzMzgxMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13381361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Bharat123rox","html_url":"https:\/\/github.com\/Bharat123rox","followers_url":"https:\/\/api.github.com\/users\/Bharat123rox\/followers","following_url":"https:\/\/api.github.com\/users\/Bharat123rox\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Bharat123rox\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Bharat123rox\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Bharat123rox\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Bharat123rox\/orgs","repos_url":"https:\/\/api.github.com\/users\/Bharat123rox\/repos","events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Bharat123rox\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-05-18T07:23:43Z","updated_at":"2020-05-27T22:09:12Z","closed_at":"2020-05-27T22:09:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/152","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/152","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/152.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/152.patch","merged_at":null},"body":"Fixes #130 by adding a name check to the Glue class","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/152\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/152\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/151","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/151\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/151\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/151\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/151","id":619968480,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE5MzA2MTYz","number":151,"title":"Fix JSON tests.","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-18T07:17:38Z","updated_at":"2020-05-18T07:21:52Z","closed_at":"2020-05-18T07:21:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/151","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/151","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/151.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/151.patch","merged_at":"2020-05-18T07:21:51Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/151\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/151\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/150","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/150\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/150\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/150\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/150","id":619809645,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE5MTgyODU4","number":150,"title":"Add WNUT 17 NER dataset","user":{"login":"stefan-it","id":20651387,"node_id":"MDQ6VXNlcjIwNjUxMzg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20651387?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stefan-it","html_url":"https:\/\/github.com\/stefan-it","followers_url":"https:\/\/api.github.com\/users\/stefan-it\/followers","following_url":"https:\/\/api.github.com\/users\/stefan-it\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stefan-it\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stefan-it\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stefan-it\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stefan-it\/orgs","repos_url":"https:\/\/api.github.com\/users\/stefan-it\/repos","events_url":"https:\/\/api.github.com\/users\/stefan-it\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stefan-it\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-05-17T22:19:04Z","updated_at":"2020-05-26T20:37:59Z","closed_at":"2020-05-26T20:37:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/150","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/150","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/150.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/150.patch","merged_at":"2020-05-26T20:37:59Z"},"body":"Hi,\r\n\r\nthis PR adds the WNUT 17 dataset to `nlp`.\r\n\r\n> Emerging and Rare entity recognition\r\n> This shared task focuses on identifying unusual, previously-unseen entities in the context of emerging discussions. Named entities form the basis of many modern approaches to other tasks (like event clustering and summarisation), but recall on them is a real problem in noisy text - even among annotators. This drop tends to be due to novel entities and surface forms. Take for example the tweet \u201cso.. kktny in 30 mins?\u201d - even human experts find entity kktny hard to detect and resolve. 
This task will evaluate the ability to detect and classify novel, emerging, singleton named entities in noisy text.\r\n> \r\n> The goal of this task is to provide a definition of emerging and of rare entities, and based on that, also datasets for detecting these entities.\r\n\r\nMore information about the dataset can be found on the [shared task page](https:\/\/noisy-text.github.io\/2017\/emerging-rare-entities.html).\r\n\r\nThe dataset is taken from their [GitHub repository](https:\/\/github.com\/leondz\/emerging_entities_17), because the data provided in this repository contains minor fixes in the dataset format.\r\n\r\n## Usage\r\n\r\nThe WNUT 17 dataset can then be used in `nlp` like this:\r\n\r\n```python\r\nimport nlp\r\n\r\nwnut_17 = nlp.load_dataset(\".\/datasets\/wnut_17\/wnut_17.py\")\r\n\r\nprint(wnut_17)\r\n```\r\n\r\nThis outputs:\r\n\r\n```txt\r\n'train': Dataset(schema: {'id': 'string', 'tokens': 'list', 'labels': 'list'}, num_rows: 3394)\r\n'validation': Dataset(schema: {'id': 'string', 'tokens': 'list', 'labels': 'list'}, num_rows: 1009)\r\n'test': Dataset(schema: {'id': 'string', 'tokens': 'list', 'labels': 'list'}, num_rows: 1287)\r\n```\r\n\r\nThe numbers are identical to the ones in [this paper](https:\/\/www.ijcai.org\/Proceedings\/2019\/0702.pdf) and are the same as using the `dataset` reader in Flair.\r\n\r\n## Features\r\n\r\nThe following feature format is used to represent a sentence in the WNUT 17 dataset:\r\n\r\n| Feature | Example | Description\r\n| ---- | ---- | -----------------\r\n| `id` | `0` | Number (id) of current sentence\r\n| `tokens` | `[\"AHFA\", \"extends\", \"deadline\"]` | List of tokens (strings) for a sentence\r\n| `labels` | `[\"B-group\", \"O\", \"O\"]` | List of labels (outer span)\r\n\r\nThe following labels are used in WNUT 17:\r\n\r\n```txt\r\nO\r\nB-corporation\r\nI-corporation\r\nB-location\r\nI-location\r\nB-product\r\nI-product\r\nB-person\r\nI-person\r\nB-group\r\nI-group\r\nB-creative-work\r\nI-creative-work\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/150\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":1,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/150\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/149","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/149\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/149\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/149\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/149","id":619735739,"node_id":"MDU6SXNzdWU2MTk3MzU3Mzk=","number":149,"title":"[Feature request] Add Ubuntu Dialogue Corpus dataset","user":{"login":"danth","id":28959268,"node_id":"MDQ6VXNlcjI4OTU5MjY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28959268?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danth","html_url":"https:\/\/github.com\/danth","followers_url":"https:\/\/api.github.com\/users\/danth\/followers","following_url":"https:\/\/api.github.com\/users\/danth\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danth\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danth\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danth\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danth\/orgs","repos_url":"https:\/\/api.github.com\/users\/danth\/repos","events_url":"https:\/\/api.github.com\/users\/danth\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danth\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-17T15:42:39Z","updated_at":"2020-05-18T17:01:46Z","closed_at":"2020-05-18T17:01:46Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"https:\/\/github.com\/rkadlec\/ubuntu-ranking-dataset-creator or http:\/\/dataset.cs.mcgill.ca\/ubuntu-corpus-1.0\/","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/149\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/149\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/148","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/148\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/148\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/148\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/148","id":619590555,"node_id":"MDU6SXNzdWU2MTk1OTA1NTU=","number":148,"title":"_download_and_prepare() got an unexpected keyword argument 'verify_infos'","user":{"login":"richarddwang","id":17963619,"node_id":"MDQ6VXNlcjE3OTYzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17963619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richarddwang","html_url":"https:\/\/github.com\/richarddwang","followers_url":"https:\/\/api.github.com\/users\/richarddwang\/followers","following_url":"https:\/\/api.github.com\/users\/richarddwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richarddwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richarddwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richarddwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richarddwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/richarddwang\/repos","events_url":"https:\/\/api.github.com\/users\/richarddwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richarddwang\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-17T01:48:53Z","updated_at":"2020-05-18T07:38:33Z","closed_at":"2020-05-18T07:38:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"# Reproduce\r\nIn Colab,\r\n```\r\n%pip install -q  nlp\r\n%pip install -q apache_beam mwparserfromhell\r\n\r\ndataset = nlp.load_dataset('wikipedia')\r\n```\r\nget\r\n```\r\nDownloading and preparing dataset wikipedia\/20200501.aa (download: Unknown size, generated: Unknown size, total: Unknown size) to \/root\/.cache\/huggingface\/datasets\/wikipedia\/20200501.aa\/1.0.0...\r\n\r\n---------------------------------------------------------------------------\r\n\r\nTypeError                                 Traceback (most recent call last)\r\n\r\n in ()\r\n----> 1 dataset = nlp.load_dataset('wikipedia')\r\n\r\n1 frames\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n    515         download_mode=download_mode,\r\n    516         ignore_verifications=ignore_verifications,\r\n--> 517         save_infos=save_infos,\r\n    518     )\r\n    519 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, dl_manager, **download_and_prepare_kwargs)\r\n    361                 verify_infos = not save_infos and 
not ignore_verifications\r\n    362                 self._download_and_prepare(\r\n--> 363                     dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n    364                 )\r\n    365                 # Sync info\r\n\r\nTypeError: _download_and_prepare() got an unexpected keyword argument 'verify_infos'\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/148\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/148\/timeline","performed_via_github_app":null}
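The traceback in issue #148 above is a plain keyword-argument mismatch: the base builder forwards `verify_infos` to a subclass hook that does not accept it. Below is a generic, self-contained illustration of that failure mode, with invented class names rather than the real `nlp` builders.

```python
# Generic illustration: base class forwards a keyword the override doesn't accept.
class BaseBuilder:
    def download_and_prepare(self):
        # the base class always forwards verify_infos to the subclass hook
        self._download_and_prepare(dl_manager=object(), verify_infos=True)

    def _download_and_prepare(self, dl_manager, verify_infos):
        print("ok:", verify_infos)


class BeamLikeBuilder(BaseBuilder):
    # override forgot to accept (or **ignore) verify_infos
    def _download_and_prepare(self, dl_manager):
        print("preparing with beam")


BaseBuilder().download_and_prepare()          # prints: ok: True
try:
    BeamLikeBuilder().download_and_prepare()
except TypeError as e:
    print(e)  # _download_and_prepare() got an unexpected keyword argument 'verify_infos'
```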
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/147","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/147\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/147\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/147\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/147","id":619581907,"node_id":"MDU6SXNzdWU2MTk1ODE5MDc=","number":147,"title":"Error with sklearn train_test_split","user":{"login":"ClonedOne","id":6853743,"node_id":"MDQ6VXNlcjY4NTM3NDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6853743?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ClonedOne","html_url":"https:\/\/github.com\/ClonedOne","followers_url":"https:\/\/api.github.com\/users\/ClonedOne\/followers","following_url":"https:\/\/api.github.com\/users\/ClonedOne\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ClonedOne\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ClonedOne\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ClonedOne\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ClonedOne\/orgs","repos_url":"https:\/\/api.github.com\/users\/ClonedOne\/repos","events_url":"https:\/\/api.github.com\/users\/ClonedOne\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ClonedOne\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-17T00:28:24Z","updated_at":"2020-06-18T16:23:23Z","closed_at":"2020-06-18T16:23:23Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It would be nice if we could use sklearn `train_test_split` to quickly generate subsets from the dataset objects returned by `nlp.load_dataset`. At the moment the code:\r\n\r\n```python\r\ndata = nlp.load_dataset('imdb', cache_dir=data_cache)\r\nf_half, s_half = train_test_split(data['train'], test_size=0.5, random_state=seed)\r\n```\r\nthrows:\r\n```\r\nValueError: Can only get row(s) (int or slice) or columns (string).\r\n```\r\nIt's not a big deal, since there are other ways to split the data, but it would be a cool thing to have.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/147\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/147\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/146","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/146\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/146\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/146\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/146","id":619564653,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE5MDI5MjUx","number":146,"title":"Add BERTScore to metrics","user":{"login":"felixgwu","id":7753366,"node_id":"MDQ6VXNlcjc3NTMzNjY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7753366?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/felixgwu","html_url":"https:\/\/github.com\/felixgwu","followers_url":"https:\/\/api.github.com\/users\/felixgwu\/followers","following_url":"https:\/\/api.github.com\/users\/felixgwu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/felixgwu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/felixgwu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/felixgwu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/felixgwu\/orgs","repos_url":"https:\/\/api.github.com\/users\/felixgwu\/repos","events_url":"https:\/\/api.github.com\/users\/felixgwu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/felixgwu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-16T22:09:39Z","updated_at":"2020-05-17T22:22:10Z","closed_at":"2020-05-17T22:22:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/146","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/146","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/146.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/146.patch","merged_at":"2020-05-17T22:22:09Z"},"body":"This PR adds [BERTScore](https:\/\/arxiv.org\/abs\/1904.09675) to metrics.\r\nHere is an example of how to use it.\r\n\r\n```sh\r\nimport nlp\r\nbertscore = nlp.load_metric('metrics\/bertscore') # or simply nlp.load_metric('bertscore') after this is added to huggingface's s3 bucket\r\npredictions = ['example', 'fruit']\r\nreferences = [['this is an example.', 'this is one example.'], ['apple']]\r\nresults = bertscore.compute(predictions, references, lang='en')\r\nprint(results)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/146\/reactions","total_count":3,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":3,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/146\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/145","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/145\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/145\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/145\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/145","id":619480549,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4OTcxMjg0","number":145,"title":"[AWS Tests] Follow-up PR from #144","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-16T13:53:46Z","updated_at":"2020-05-16T13:54:23Z","closed_at":"2020-05-16T13:54:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/145","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/145","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/145.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/145.patch","merged_at":"2020-05-16T13:54:22Z"},"body":"I forgot to add this line in PR #145 . ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/145\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/145\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/144","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/144\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/144\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/144\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/144","id":619477367,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4OTY5NjA1","number":144,"title":"[AWS tests] AWS test should not run for canonical datasets","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-16T13:39:30Z","updated_at":"2020-05-16T13:44:34Z","closed_at":"2020-05-16T13:44:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/144","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/144","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/144.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/144.patch","merged_at":"2020-05-16T13:44:33Z"},"body":"AWS tests should in general not run for canonical datasets. Only local tests will run in this case. This way a PR is able to pass when adding a new dataset.\r\n\r\nThis PR changes to logic to the following: \r\n\r\n1) All datasets that are present in `nlp\/datasets` are tested only locally. This way when one adds a canonical dataset, the PR includes his dataset in the tests.\r\n\r\n2) All datasets that are only present on AWS, such as `webis\/tl_dr` atm are tested only on AWS. \r\n\r\nI think the testing structure might need a bigger refactoring and better documentation very soon. \r\n\r\nMerging for now to unblock new PRs @thomwolf @mariamabarham .","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/144\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/144\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/143","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/143\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/143\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/143\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/143","id":619457641,"node_id":"MDU6SXNzdWU2MTk0NTc2NDE=","number":143,"title":"ArrowTypeError in squad metrics","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[{"id":2067393914,"node_id":"MDU6TGFiZWwyMDY3MzkzOTE0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20bug","name":"metric bug","color":"25b21e","default":false,"description":"A bug in a metric script"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-16T12:06:37Z","updated_at":"2020-05-22T13:38:52Z","closed_at":"2020-05-22T13:36:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"`squad_metric.compute` is giving following error\r\n```\r\nArrowTypeError: Could not convert [{'text': 'Denver Broncos'}, {'text': 'Denver Broncos'}, {'text': 'Denver Broncos'}] with type list: was not a dict, tuple, or recognized null value for conversion to struct type\r\n```\r\n\r\nThis is how my predictions and references look like\r\n```\r\npredictions[0]\r\n# {'id': '56be4db0acb8001400a502ec', 'prediction_text': 'Denver Broncos'}\r\n```\r\n\r\n```\r\nreferences[0]\r\n# {'answers': [{'text': 'Denver Broncos'},\r\n  {'text': 'Denver Broncos'},\r\n  {'text': 'Denver Broncos'}],\r\n 'id': '56be4db0acb8001400a502ec'}\r\n```\r\n\r\nThese are structured as per the `squad_metric.compute` help string.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/143\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/143\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/142","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/142\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/142\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/142\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/142","id":619450068,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4OTU0OTc1","number":142,"title":"[WMT] Add all wmt","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-16T11:28:46Z","updated_at":"2020-05-17T12:18:21Z","closed_at":"2020-05-17T12:18:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/142","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/142","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/142.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/142.patch","merged_at":"2020-05-17T12:18:20Z"},"body":"This PR adds all wmt datasets scripts. At the moment the script is **not** functional for the language pairs \"cs-en\", \"ru-en\", \"hi-en\" because apparently it takes up to a week to get the manual data for these datasets: see http:\/\/ufal.mff.cuni.cz\/czeng. \r\n\r\nThe datasets are fully functional though for the \"big\" language pairs \"de-en\" and \"fr-en\". \r\n\r\nOverall I think the scripts are very messy and might need a big refactoring at some point.\r\n\r\nFor now I think there are good to merge (most dataset configs can be used). I will add \"cs\", \"ru\" and \"hi\" when the manual data is available. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/142\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/142\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/141","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/141\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/141\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/141\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/141","id":619447090,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4OTUzMzQw","number":141,"title":"[Clean up] remove bogus folder","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-16T11:13:42Z","updated_at":"2020-05-16T13:24:27Z","closed_at":"2020-05-16T13:24:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/141","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/141","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/141.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/141.patch","merged_at":"2020-05-16T13:24:25Z"},"body":"@mariamabarham  - I think you accidentally placed it there.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/141\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/141\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/140","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/140\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/140\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/140\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/140","id":619443613,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4OTUxMzg4","number":140,"title":"[Tests] run local tests as default","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-16T10:56:06Z","updated_at":"2020-05-16T13:21:44Z","closed_at":"2020-05-16T13:21:43Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/140","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/140","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/140.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/140.patch","merged_at":"2020-05-16T13:21:43Z"},"body":"This PR also enables local tests by default\r\n\r\nI think it's safer for now to enable both local and aws tests for every commit. The problem currently is that when we do a PR to add a dataset, the dataset is not yet on AWS on therefore not tested on the PR itself. Thus the PR will always be green even if the datasets are not correct. This PR aims at fixing this.\r\n\r\n## Suggestion on how to commit to the repo from now on:\r\nNow since the repo is \"online\", I think we should adopt a couple of best practices:\r\n1) - No direct committing to the repo anymore. Every change should be opened in a PR and be well documented so that we can find it later\r\n2) - Every PR has to be reviewed by at least x people (I guess @thomwolf you should decide here) because we now have to be much more careful when doing changes to the API for backward compatibility, etc...\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/140\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/140\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/139","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/139\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/139\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/139\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/139","id":619327409,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4ODc4NzMy","number":139,"title":"Add GermEval 2014 NER dataset","user":{"login":"stefan-it","id":20651387,"node_id":"MDQ6VXNlcjIwNjUxMzg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20651387?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stefan-it","html_url":"https:\/\/github.com\/stefan-it","followers_url":"https:\/\/api.github.com\/users\/stefan-it\/followers","following_url":"https:\/\/api.github.com\/users\/stefan-it\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stefan-it\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stefan-it\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stefan-it\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stefan-it\/orgs","repos_url":"https:\/\/api.github.com\/users\/stefan-it\/repos","events_url":"https:\/\/api.github.com\/users\/stefan-it\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stefan-it\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.githu
b.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2020-05-15T23:42:09Z","updated_at":"2020-05-16T13:56:37Z","closed_at":"2020-05-16T13:56:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/139","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/139","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/139.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/139.patch","merged_at":"2020-05-16T13:56:22Z"},"body":"Hi, \r\n\r\nthis PR adds the GermEval 2014 NER dataset \ud83d\ude03\r\n\r\n> The GermEval 2014 NER Shared Task builds on a new dataset with German Named Entity annotation [1] with the following properties:\r\n\r\n> - The data was sampled from German Wikipedia and News Corpora as a collection of citations.\r\n> - The dataset covers over 31,000 sentences corresponding to over 590,000 tokens.\r\n> - The NER annotation uses the NoSta-D guidelines, which extend the T\u00fcbingen Treebank guidelines, using four main NER categories with sub-structure, and annotating embeddings among NEs such as [ORG FC Kickers [LOC Darmstadt]].\r\n\r\nDataset will be downloaded from the [official GermEval 2014 website](https:\/\/sites.google.com\/site\/germeval2014ner\/data).\r\n\r\n## Dataset format\r\n\r\nHere's an example of the dataset format from the original dataset:\r\n\r\n```tsv\r\n# http:\/\/de.wikipedia.org\/wiki\/Manfred_Korfmann [2009-10-17]\r\n1 Aufgrund O O\r\n2 seiner O O\r\n3 Initiative O O\r\n4 fand O O\r\n5 2001\/2002 O O\r\n6 in O O\r\n7 Stuttgart B-LOC O\r\n8 , O O\r\n9 Braunschweig B-LOC O\r\n10 und O O\r\n11 Bonn B-LOC O\r\n12 eine O O\r\n13 gro\u00dfe O O\r\n14 und O O\r\n15 publizistisch O O\r\n16 vielbeachtete O O\r\n17 Troia-Ausstellung B-LOCpart O\r\n18 statt O O\r\n19 , O O\r\n20 \u201e O O\r\n21 Troia B-OTH B-LOC\r\n22 - I-OTH O\r\n23 Traum I-OTH O\r\n24 und I-OTH O\r\n25 Wirklichkeit I-OTH O\r\n26 \u201c O O\r\n27 . O O\r\n```\r\n\r\nThe sentence is encoded as one token per line (tab separated columns.\r\n\r\nThe first column contains either a `#`, which signals the source the sentence is cited from and the date it was retrieved, or the token number within the sentence.\r\n\r\nThe second column contains the token.\r\n\r\nColumn three and four contain the named entity (in IOB2 scheme).\r\nOuter spans are encoded in the third column, embedded\/nested spans in the fourth column.\r\n\r\n## Features\r\n\r\nI decided to keep most information from the dataset. 
That means the so-called \"source\" information (where the sentences come from + date information) is also returned for each sentence in the feature vector.\r\n\r\nFor each sentence in the dataset, one feature vector (`nlp.Features` definition) will be returned:\r\n\r\n| Feature | Example | Description\r\n| ---- | ---- | -----------------\r\n| `id` | `0` | Number (id) of current sentence\r\n| `source` | `http:\/\/de.wikipedia.org\/wiki\/Manfred_Korfmann [2009-10-17]` | URL and retrieval date as string\r\n| `tokens` | `[\"Schwartau\", \"sagte\", \":\"]` | List of tokens (strings) for a sentence\r\n| `labels` | `[\"B-PER\", \"O\", \"O\"]` | List of labels (outer span)\r\n| `nested-labels` | `[\"O\", \"O\", \"O\"]` | List of labels for nested span\r\n\r\n## Example\r\n\r\nThe following command downloads the dataset from the official GermEval 2014 page and pre-processes it:\r\n\r\n```bash\r\npython nlp-cli test datasets\/germeval_14 --all_configs\r\n```\r\n\r\nIt then outputs the number of sentences for the training, development and test sets. The training set consists of 24,000 sentences, the development set of 2,200 and the test set of 5,100 sentences.\r\n\r\nNow it can be imported and used with `nlp`:\r\n\r\n```python\r\nimport nlp\r\n\r\ngermeval = nlp.load_dataset(\".\/datasets\/germeval_14\/germeval_14.py\")\r\nassert len(germeval[\"train\"]) == 24000\r\n\r\n# Show first sentence of training set:\r\ngermeval[\"train\"][0]\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/139\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/139\/timeline","performed_via_github_app":null}
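The column layout described above (a `#` source line or token number, the token, the outer label, and the nested label, all tab-separated, with blank lines between sentences) can be read with a few lines of plain Python. The sketch below is illustrative only and is not the loader used by the dataset script.

```python
# Sketch: parse GermEval-2014-style TSV into sentences of
# (token, outer_label, nested_label) triples. Illustrative only.
def read_germeval(path):
    sentences, current = [], []
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.rstrip("\n")
            if line.startswith("#"):      # source URL and retrieval date
                continue
            if not line.strip():          # blank line marks the end of a sentence
                if current:
                    sentences.append(current)
                    current = []
                continue
            _, token, outer, nested = line.split("\t")[:4]
            current.append((token, outer, nested))
    if current:
        sentences.append(current)
    return sentences
```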
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/138","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/138\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/138\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/138\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/138","id":619225191,"node_id":"MDU6SXNzdWU2MTkyMjUxOTE=","number":138,"title":"Consider renaming to nld","user":{"login":"honnibal","id":8059750,"node_id":"MDQ6VXNlcjgwNTk3NTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8059750?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/honnibal","html_url":"https:\/\/github.com\/honnibal","followers_url":"https:\/\/api.github.com\/users\/honnibal\/followers","following_url":"https:\/\/api.github.com\/users\/honnibal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/honnibal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/honnibal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/honnibal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/honnibal\/orgs","repos_url":"https:\/\/api.github.com\/users\/honnibal\/repos","events_url":"https:\/\/api.github.com\/users\/honnibal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/honnibal\/received_events","type":"User","site_admin":false},"labels":[{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2020-05-15T20:23:27Z","updated_at":"2020-12-17T20:56:31Z","closed_at":"2020-09-28T00:08:10Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hey :)\r\n\r\nJust making a thread here recording what I said on Twitter, as it's impossible to follow discussion there. It's also just really not a good way to talk about this sort of thing.\r\n\r\nThe issue is that modules go into the global namespace, so you shouldn't use variable names that conflict with module names. This means the package makes `nlp` a bad variable name everywhere in the codebase. I've always used `nlp` as the canonical variable name of spaCy's `Language` objects, and this is a convention that a lot of other code has followed (Stanza, flair, etc). And actually, your `transformers` library uses `nlp` as the name for its `Pipeline` instance in your readme.\r\n\r\nIf you stick with the `nlp` name for this package, if anyone uses it then they should rewrite all of that code. If `nlp` is a bad choice of variable anywhere, it's a bad choice of variable everywhere --- because you shouldn't have to notice whether some other function uses a module when you're naming variables within a function. You want to have one convention that you can stick to everywhere.\r\n\r\nIf people use your `nlp` package and continue to use the `nlp` variable name, they'll find themselves with confusing bugs. There will be many many bits of code cut-and-paste from tutorials that give confusing results when combined with the data loading from the `nlp` library. 
The problem will be especially bad for shadowed modules (people might reasonably have a module named `nlp.py` within their codebase) and notebooks, as people might run notebook cells for data loading out-of-order.\r\n\r\nI don't think it's an exaggeration to say that if your library becomes popular, we'll all be answering issues around this about once a week for the next few years. That seems pretty unideal, so I do hope you'll reconsider.\r\n\r\nI suggest `nld` as a better name. It more accurately represents what the package actually does. It's pretty unideal to have a package named `nlp` that doesn't do any processing, and contains data about natural language generation or other non-NLP tasks. The name is equally short, and is sort of a visual pun on `nlp`, since a d is a rotated p.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/138\/reactions","total_count":32,"+1":32,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/138\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/137","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/137\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/137\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/137\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/137","id":619214645,"node_id":"MDU6SXNzdWU2MTkyMTQ2NDU=","number":137,"title":"Tokenized BLEU considered harmful - Discussion on community-based process","user":{"login":"kpu","id":247512,"node_id":"MDQ6VXNlcjI0NzUxMg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/247512?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kpu","html_url":"https:\/\/github.com\/kpu","followers_url":"https:\/\/api.github.com\/users\/kpu\/followers","following_url":"https:\/\/api.github.com\/users\/kpu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kpu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kpu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kpu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kpu\/orgs","repos_url":"https:\/\/api.github.com\/users\/kpu\/repos","events_url":"https:\/\/api.github.com\/users\/kpu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kpu\/received_events","type":"User","site_admin":false},"labels":[{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"},{"id":2067400959,"node_id":"MDU6TGFiZWwyMDY3NDAwOTU5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Metric%20discussion","name":"Metric discussion","color":"d722e8","default":false,"description":"Discussions on the metrics"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2020-05-15T20:08:34Z","updated_at":"2021-01-07T10:41:28Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"https:\/\/github.com\/huggingface\/nlp\/blob\/7d1526dfeeb29248d832f1073192dbf03ad642da\/metrics\/bleu\/bleu.py#L76 assumes the inputs are tokenized by the user.  This is bad practice because the user's tokenizer is usually not the same as the one used by `mteval-v13a.pl`, the closest thing we have to a standard.  Moreover, tokenizers are like window managers: they can be endlessly customized and nobody has quite the same options.  \r\n\r\nAs @mjpost reported in https:\/\/www.aclweb.org\/anthology\/W18-6319.pdf BLEU configurations can vary by 1.8.  Yet people are incorrectly putting non-comparable BLEU scores in the same table, such as Table 1 in https:\/\/arxiv.org\/abs\/2004.04902 .  \r\n\r\nThere are a few use cases for tokenized BLEU like Thai.  For Chinese, people seem to use character BLEU for better or worse.\r\n\r\nThe default easy option should be the one that's correct more often.  And that is sacrebleu.  Please don't make it easy for people to run what is usually the wrong option; it definitely shouldn't be `bleu`.  \r\n\r\nAlso, I know this is inherited from TensorFlow and, paging @lmthang, they should discourage it too.  
","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/137\/reactions","total_count":12,"+1":12,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/137\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/136","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/136\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/136\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/136\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/136","id":619211018,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4NzgxNzI4","number":136,"title":"Update README.md","user":{"login":"renaud","id":75369,"node_id":"MDQ6VXNlcjc1MzY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75369?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/renaud","html_url":"https:\/\/github.com\/renaud","followers_url":"https:\/\/api.github.com\/users\/renaud\/followers","following_url":"https:\/\/api.github.com\/users\/renaud\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/renaud\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/renaud\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/renaud\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/renaud\/orgs","repos_url":"https:\/\/api.github.com\/users\/renaud\/repos","events_url":"https:\/\/api.github.com\/users\/renaud\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/renaud\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-15T20:01:07Z","updated_at":"2020-05-17T12:17:28Z","closed_at":"2020-05-17T12:17:28Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/136","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/136","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/136.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/136.patch","merged_at":null},"body":"small typo","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/136\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/136\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/135","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/135\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/135\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/135\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/135","id":619206708,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4Nzc4MTMw","number":135,"title":"Fix print statement in READ.md","user":{"login":"codehunk628","id":51091425,"node_id":"MDQ6VXNlcjUxMDkxNDI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/51091425?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/codehunk628","html_url":"https:\/\/github.com\/codehunk628","followers_url":"https:\/\/api.github.com\/users\/codehunk628\/followers","following_url":"https:\/\/api.github.com\/users\/codehunk628\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/codehunk628\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/codehunk628\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/codehunk628\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/codehunk628\/orgs","repos_url":"https:\/\/api.github.com\/users\/codehunk628\/repos","events_url":"https:\/\/api.github.com\/users\/codehunk628\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/codehunk628\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-15T19:52:23Z","updated_at":"2020-05-17T12:14:06Z","closed_at":"2020-05-17T12:14:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/135","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/135","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/135.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/135.patch","merged_at":"2020-05-17T12:14:05Z"},"body":"print statement was throwing generator object instead of printing names of available datasets\/metrics","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/135\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/135\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/134","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/134\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/134\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/134\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/134","id":619112641,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4Njk5OTYz","number":134,"title":"Update README.md","user":{"login":"pranv","id":8753078,"node_id":"MDQ6VXNlcjg3NTMwNzg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8753078?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pranv","html_url":"https:\/\/github.com\/pranv","followers_url":"https:\/\/api.github.com\/users\/pranv\/followers","following_url":"https:\/\/api.github.com\/users\/pranv\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pranv\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pranv\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pranv\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pranv\/orgs","repos_url":"https:\/\/api.github.com\/users\/pranv\/repos","events_url":"https:\/\/api.github.com\/users\/pranv\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pranv\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-15T16:56:14Z","updated_at":"2020-05-28T08:21:49Z","closed_at":"2020-05-28T08:21:49Z","author_association":"NONE","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/134","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/134","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/134.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/134.patch","merged_at":null},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/134\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/134\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/133","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/133\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/133\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/133\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/133","id":619094954,"node_id":"MDU6SXNzdWU2MTkwOTQ5NTQ=","number":133,"title":"[Question] Using\/adding a local dataset","user":{"login":"zphang","id":1668462,"node_id":"MDQ6VXNlcjE2Njg0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1668462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zphang","html_url":"https:\/\/github.com\/zphang","followers_url":"https:\/\/api.github.com\/users\/zphang\/followers","following_url":"https:\/\/api.github.com\/users\/zphang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zphang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zphang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zphang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zphang\/orgs","repos_url":"https:\/\/api.github.com\/users\/zphang\/repos","events_url":"https:\/\/api.github.com\/users\/zphang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zphang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-05-15T16:26:06Z","updated_at":"2020-07-23T16:44:09Z","closed_at":"2020-07-23T16:44:09Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Users may want to either create\/modify a local copy of a dataset, or use a custom-built dataset with the same `Dataset` API as externally downloaded datasets.\r\n\r\nIt appears to be possible to point to a local dataset path rather than downloading the external ones, but I'm not exactly sure how to go about doing this.\r\n\r\nA notebook\/example script demonstrating this would be very helpful.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/133\/reactions","total_count":6,"+1":6,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/133\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/132","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/132\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/132\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/132\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/132","id":619077851,"node_id":"MDU6SXNzdWU2MTkwNzc4NTE=","number":132,"title":"[Feature Request] Add the OpenWebText dataset","user":{"login":"LysandreJik","id":30755778,"node_id":"MDQ6VXNlcjMwNzU1Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30755778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LysandreJik","html_url":"https:\/\/github.com\/LysandreJik","followers_url":"https:\/\/api.github.com\/users\/LysandreJik\/followers","following_url":"https:\/\/api.github.com\/users\/LysandreJik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LysandreJik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LysandreJik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LysandreJik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LysandreJik\/orgs","repos_url":"https:\/\/api.github.com\/users\/LysandreJik\/repos","events_url":"https:\/\/api.github.com\/users\/LysandreJik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LysandreJik\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-15T15:57:29Z","updated_at":"2020-10-07T14:22:48Z","closed_at":"2020-10-07T14:22:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The OpenWebText dataset is an open clone of OpenAI's WebText dataset. It can be used to train ELECTRA as is specified in the [README](https:\/\/www.github.com\/google-research\/electra).\r\n\r\nMore information and the download link are available [here](https:\/\/skylion007.github.io\/OpenWebTextCorpus\/).","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/132\/reactions","total_count":2,"+1":2,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/132\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/131","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/131\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/131\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/131\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/131","id":619073731,"node_id":"MDU6SXNzdWU2MTkwNzM3MzE=","number":131,"title":"[Feature request] Add Toronto BookCorpus dataset","user":{"login":"jarednielsen","id":4564897,"node_id":"MDQ6VXNlcjQ1NjQ4OTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4564897?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jarednielsen","html_url":"https:\/\/github.com\/jarednielsen","followers_url":"https:\/\/api.github.com\/users\/jarednielsen\/followers","following_url":"https:\/\/api.github.com\/users\/jarednielsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jarednielsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jarednielsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jarednielsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jarednielsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/jarednielsen\/repos","events_url":"https:\/\/api.github.com\/users\/jarednielsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jarednielsen\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-15T15:50:44Z","updated_at":"2020-06-28T21:27:31Z","closed_at":"2020-06-28T21:27:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I know the copyright\/distribution of this one is complex, but it would be great to have! That, combined with the existing `wikitext`, would provide a complete dataset for pretraining models like BERT.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/131\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/131\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/130","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/130\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/130\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/130\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/130","id":619035440,"node_id":"MDU6SXNzdWU2MTkwMzU0NDA=","number":130,"title":"Loading GLUE dataset loads CoLA by default","user":{"login":"zphang","id":1668462,"node_id":"MDQ6VXNlcjE2Njg0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1668462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zphang","html_url":"https:\/\/github.com\/zphang","followers_url":"https:\/\/api.github.com\/users\/zphang\/followers","following_url":"https:\/\/api.github.com\/users\/zphang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zphang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zphang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zphang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zphang\/orgs","repos_url":"https:\/\/api.github.com\/users\/zphang\/repos","events_url":"https:\/\/api.github.com\/users\/zphang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zphang\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-15T14:55:50Z","updated_at":"2020-05-27T22:08:15Z","closed_at":"2020-05-27T22:08:15Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"If I run:\r\n\r\n```python\r\ndataset = nlp.load_dataset('glue')\r\n```\r\nThe resultant dataset seems to be CoLA be default, without throwing any error. This is in contrast to calling:\r\n\r\n```python\r\nmetric = nlp.load_metric(\"glue\")\r\n```\r\nwhich throws an error telling the user that they need to specify a task in GLUE. Should the same apply for loading datasets?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/130\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/130\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/129","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/129\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/129\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/129\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/129","id":618997725,"node_id":"MDU6SXNzdWU2MTg5OTc3MjU=","number":129,"title":"[Feature request] Add Google Natural Question dataset","user":{"login":"elyase","id":1175888,"node_id":"MDQ6VXNlcjExNzU4ODg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1175888?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/elyase","html_url":"https:\/\/github.com\/elyase","followers_url":"https:\/\/api.github.com\/users\/elyase\/followers","following_url":"https:\/\/api.github.com\/users\/elyase\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/elyase\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/elyase\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/elyase\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/elyase\/orgs","repos_url":"https:\/\/api.github.com\/users\/elyase\/repos","events_url":"https:\/\/api.github.com\/users\/elyase\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/elyase\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2020-05-15T14:14:20Z","updated_at":"2020-07-23T13:21:29Z","closed_at":"2020-07-23T13:21:29Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Would be great to have https:\/\/github.com\/google-research-datasets\/natural-questions as an alternative to SQuAD.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/129\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/129\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/128","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/128\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/128\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/128\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/128","id":618951117,"node_id":"MDU6SXNzdWU2MTg5NTExMTc=","number":128,"title":"Some error inside nlp.load_dataset()","user":{"login":"polkaYK","id":18486287,"node_id":"MDQ6VXNlcjE4NDg2Mjg3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18486287?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/polkaYK","html_url":"https:\/\/github.com\/polkaYK","followers_url":"https:\/\/api.github.com\/users\/polkaYK\/followers","following_url":"https:\/\/api.github.com\/users\/polkaYK\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/polkaYK\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/polkaYK\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/polkaYK\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/polkaYK\/orgs","repos_url":"https:\/\/api.github.com\/users\/polkaYK\/repos","events_url":"https:\/\/api.github.com\/users\/polkaYK\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/polkaYK\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-15T13:01:29Z","updated_at":"2020-05-15T13:10:40Z","closed_at":"2020-05-15T13:10:40Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"First of all, nice work!\r\n\r\nI am going through [this overview notebook](https:\/\/colab.research.google.com\/github\/huggingface\/nlp\/blob\/master\/notebooks\/Overview.ipynb)\r\n\r\nIn simple step `dataset = nlp.load_dataset('squad', split='validation[:10%]')`\r\n\r\nI get an error, which is connected with some inner code, I think:\r\n```\r\n---------------------------------------------------------------------------\r\n\r\nTypeError                                 Traceback (most recent call last)\r\n\r\n in ()\r\n      1 # Downloading and loading a dataset\r\n      2 \r\n----> 3 dataset = nlp.load_dataset('squad', split='validation[:10%]')\r\n\r\n8 frames\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/load.py in load_dataset(path, name, version, data_dir, data_files, split, cache_dir, download_config, download_mode, ignore_verifications, save_infos, **config_kwargs)\r\n    515         download_mode=download_mode,\r\n    516         ignore_verifications=ignore_verifications,\r\n--> 517         save_infos=save_infos,\r\n    518     )\r\n    519 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, save_infos, dl_manager, **download_and_prepare_kwargs)\r\n    361                 verify_infos = not save_infos and not ignore_verifications\r\n    362                 self._download_and_prepare(\r\n--> 363                     dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n    364                 )\r\n    365                 # Sync 
info\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n    414             try:\r\n    415                 # Prepare split will record examples associated to the split\r\n--> 416                 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n    417             except OSError:\r\n    418                 raise OSError(\"Cannot find data file. \" + (self.MANUAL_DOWNLOAD_INSTRUCTIONS or \"\"))\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/builder.py in _prepare_split(self, split_generator)\r\n    585         fname = \"{}-{}.arrow\".format(self.name, split_generator.name)\r\n    586         fpath = os.path.join(self._cache_dir, fname)\r\n--> 587         examples_type = self.info.features.type\r\n    588         writer = ArrowWriter(data_type=examples_type, path=fpath, writer_batch_size=self._writer_batch_size)\r\n    589 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/features.py in type(self)\r\n    460     @property\r\n    461     def type(self):\r\n--> 462         return get_nested_type(self)\r\n    463 \r\n    464     @classmethod\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/features.py in get_nested_type(schema)\r\n    370     # Nested structures: we allow dict, list\/tuples, sequences\r\n    371     if isinstance(schema, dict):\r\n--> 372         return pa.struct({key: get_nested_type(value) for key, value in schema.items()})\r\n    373     elif isinstance(schema, (list, tuple)):\r\n    374         assert len(schema) == 1, \"We defining list feature, you should just provide one example of the inner type\"\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/features.py in (.0)\r\n    370     # Nested structures: we allow dict, list\/tuples, sequences\r\n    371     if isinstance(schema, dict):\r\n--> 372         return pa.struct({key: get_nested_type(value) for key, value in schema.items()})\r\n    373     elif isinstance(schema, (list, tuple)):\r\n    374         assert len(schema) == 1, \"We defining list feature, you should just provide one example of the inner type\"\r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/features.py in get_nested_type(schema)\r\n    379         # We allow to reverse list of dict => dict of list for compatiblity with tfds\r\n    380         if isinstance(inner_type, pa.StructType):\r\n--> 381             return pa.struct(dict((f.name, pa.list_(f.type, schema.length)) for f in inner_type))\r\n    382         return pa.list_(inner_type, schema.length)\r\n    383 \r\n\r\n\/usr\/local\/lib\/python3.6\/dist-packages\/nlp\/features.py in (.0)\r\n    379         # We allow to reverse list of dict => dict of list for compatiblity with tfds\r\n    380         if isinstance(inner_type, pa.StructType):\r\n--> 381             return pa.struct(dict((f.name, pa.list_(f.type, schema.length)) for f in inner_type))\r\n    382         return pa.list_(inner_type, schema.length)\r\n    383 \r\n\r\nTypeError: list_() takes exactly one argument (2 given)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/128\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/128\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/127","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/127\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/127\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/127\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/127","id":618909042,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4NTQ1MDcy","number":127,"title":"Update Overview.ipynb","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-15T11:46:48Z","updated_at":"2020-05-15T11:47:27Z","closed_at":"2020-05-15T11:47:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/127","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/127","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/127.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/127.patch","merged_at":"2020-05-15T11:47:25Z"},"body":"update notebook","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/127\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/127\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/126","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/126\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/126\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/126\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/126","id":618897499,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4NTM1Mzc5","number":126,"title":"remove webis","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-15T11:25:20Z","updated_at":"2020-05-15T11:31:24Z","closed_at":"2020-05-15T11:30:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/126","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/126","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/126.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/126.patch","merged_at":"2020-05-15T11:30:26Z"},"body":"Remove webis from dataset folder.\r\n\r\nOur first dataset script that only lives on AWS :-) https:\/\/s3.console.aws.amazon.com\/s3\/buckets\/datasets.huggingface.co\/nlp\/datasets\/webis\/tl_dr\/?region=us-east-1 @julien-c @jplu ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/126\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/126\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/125","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/125\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/125\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/125\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/125","id":618869048,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4NTExNDE0","number":125,"title":"[Newsroom] add newsroom","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-15T10:34:34Z","updated_at":"2020-05-15T10:37:07Z","closed_at":"2020-05-15T10:37:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/125","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/125","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/125.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/125.patch","merged_at":"2020-05-15T10:37:02Z"},"body":"I checked it with the data link of the mail you forwarded @thomwolf => works well!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/125\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/125\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/124","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/124\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/124\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/124\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/124","id":618864284,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4NTA3NDUx","number":124,"title":"Xsum, require manual download of some files","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-15T10:26:13Z","updated_at":"2020-05-15T11:04:48Z","closed_at":"2020-05-15T11:04:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/124","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/124","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/124.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/124.patch","merged_at":"2020-05-15T11:04:46Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/124\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/124\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/123","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/123\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/123\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/123\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/123","id":618820140,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4NDcxODU5","number":123,"title":"[Tests] Local => aws","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-15T09:12:25Z","updated_at":"2020-05-15T10:06:12Z","closed_at":"2020-05-15T10:03:26Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/123","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/123","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/123.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/123.patch","merged_at":"2020-05-15T10:03:26Z"},"body":"## Change default Test from local => aws\r\n\r\nAs a default we set` aws=True`, `Local=False`, `slow=False`\r\n\r\n### 1. RUN_AWS=1 (default)\r\nThis runs 4 tests per dataset script.\r\n\r\na) Does the dataset script have a valid etag \/ Can it be reached on AWS? \r\nb) Can we load its `builder_class`?\r\nc) Can we load **all** dataset configs?\r\nd) _Most importantly_: Can we load the dataset? \r\n\r\nImportant - we currently only test the first config of each dataset to reduce test time. Total test time is around 1min20s.\r\n\r\n### 2. RUN_LOCAL=1 RUN_AWS=0\r\n\r\n***This should be done when debugging dataset scripts of the .\/datasets folder***\r\n\r\nThis only runs 1 test per dataset test, which is equivalent to aws d) - Can we load the dataset from the local `datasets` directory?\r\n\r\n### 3. RUN_SLOW=1\r\n\r\nWe should set up to run these tests maybe 1 time per week ? @thomwolf \r\n\r\nThe `slow` tests include two more important tests. \r\n\r\ne) Can we load the dataset with all possible configs? This test will probably fail at the moment because a lot of dummy data is missing. We should add the dummy data step by step to be sure that all configs work.\r\n\r\nf) Test that the actual dataset can be loaded. 
This will take quite some time to run, but is important to make sure that the \"real\" data can be loaded. It will also test whether the dataset script has the correct checksums file which is currently not tested with `aws=True`. @lhoestq - is there an easy way to check cheaply whether the `dataset_info.json` is correct for each dataset script? ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/123\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/123\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/122","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/122\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/122\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/122\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/122","id":618813182,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4NDY2Mzc3","number":122,"title":"Final cleanup of readme and metrics","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-15T09:00:52Z","updated_at":"2021-09-03T19:40:09Z","closed_at":"2020-05-15T09:02:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/122","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/122","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/122.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/122.patch","merged_at":"2020-05-15T09:02:22Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/122\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/122\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/121","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/121\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/121\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/121\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/121","id":618790040,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4NDQ4MTkx","number":121,"title":"make style","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-15T08:23:36Z","updated_at":"2020-05-15T08:25:39Z","closed_at":"2020-05-15T08:25:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/121","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/121","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/121.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/121.patch","merged_at":"2020-05-15T08:25:38Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/121\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/121\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/120","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/120\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/120\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/120\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/120","id":618737783,"node_id":"MDU6SXNzdWU2MTg3Mzc3ODM=","number":120,"title":"\ud83d\udc1b `map` not working","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-15T06:43:08Z","updated_at":"2020-05-15T07:02:38Z","closed_at":"2020-05-15T07:02:38Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to run a basic example (mapping function to add a prefix).  \r\n[Here is the colab notebook I'm using.](https:\/\/colab.research.google.com\/drive\/1YH4JCAy0R1MMSc-k_Vlik_s1LEzP_t1h?usp=sharing)\r\n\r\n```python\r\nimport nlp\r\n\r\ndataset = nlp.load_dataset('squad', split='validation[:10%]')\r\n\r\ndef test(sample):\r\n    sample['title'] = \"test prefix @@@ \" + sample[\"title\"]\r\n    return sample\r\n\r\nprint(dataset[0]['title'])\r\ndataset.map(test)\r\nprint(dataset[0]['title'])\r\n```\r\nOutput :\r\n> Super_Bowl_50\r\nSuper_Bowl_50\r\n\r\nExpected output :\r\n> Super_Bowl_50\r\ntest prefix @@@ Super_Bowl_50","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/120\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/120\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/119","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/119\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/119\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/119\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/119","id":618652145,"node_id":"MDU6SXNzdWU2MTg2NTIxNDU=","number":119,"title":"\ud83d\udc1b Colab : type object 'pyarrow.lib.RecordBatch' has no attribute 'from_struct_array'","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-15T02:27:26Z","updated_at":"2020-05-15T05:11:22Z","closed_at":"2020-05-15T02:45:28Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to load CNN\/DM dataset on Colab.\r\n\r\n[Colab notebook](https:\/\/colab.research.google.com\/drive\/11Mf7iNhIyt6GpgA1dBEtg3cyMHmMhtZS?usp=sharing)\r\n\r\nBut I meet this error :\r\n\r\n> AttributeError: type object 'pyarrow.lib.RecordBatch' has no attribute 'from_struct_array'\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/119\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/119\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/118","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/118\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/118\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/118\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/118","id":618643088,"node_id":"MDU6SXNzdWU2MTg2NDMwODg=","number":118,"title":"\u2753 How to apply a map to all subsets ?","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-15T01:58:52Z","updated_at":"2020-05-15T07:05:49Z","closed_at":"2020-05-15T07:04:25Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm working with CNN\/DM dataset, where I have 3 subsets : `train`, `test`, `validation`.\r\n\r\nShould I apply my map function on the subsets one by one ?\r\n\r\n```python\r\nimport nlp\r\n\r\ncnn_dm = nlp.load_dataset('cnn_dailymail')\r\nfor corpus in ['train', 'test', 'validation']:\r\n         cnn_dm[corpus] = cnn_dm[corpus].map(my_func)\r\n```\r\n\r\nOr is there a better way to do this ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/118\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/118\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/117","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/117\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/117\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/117\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/117","id":618632573,"node_id":"MDU6SXNzdWU2MTg2MzI1NzM=","number":117,"title":"\u2753 How to remove specific rows of a dataset ?","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-15T01:25:06Z","updated_at":"2021-05-14T04:02:19Z","closed_at":"2020-05-15T07:04:32Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I saw on the [example notebook](https:\/\/colab.research.google.com\/github\/huggingface\/nlp\/blob\/master\/notebooks\/Overview.ipynb#scrollTo=efFhDWhlvSVC) how to remove a specific column :\r\n\r\n```python\r\ndataset.drop('id')\r\n```\r\n\r\nBut I didn't find how to remove a specific row. \r\n\r\n**For example, how can I remove all sample with `id` < 10 ?**","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/117\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/117\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/116","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/116\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/116\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/116\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/116","id":618628264,"node_id":"MDU6SXNzdWU2MTg2MjgyNjQ=","number":116,"title":"\ud83d\udc1b Trying to use ROUGE metric : pyarrow.lib.ArrowInvalid: Column 1 named references expected length 534 but got length 323","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[{"id":2067393914,"node_id":"MDU6TGFiZWwyMDY3MzkzOTE0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20bug","name":"metric bug","color":"25b21e","default":false,"description":"A bug in a metric script"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-05-15T01:12:06Z","updated_at":"2020-05-28T23:43:07Z","closed_at":"2020-05-28T23:43:07Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to use rouge metric.\r\n\r\nI have to files : `test.pred.tokenized` and `test.gold.tokenized` with each line containing a sentence.  
\r\nI tried :\r\n\r\n```python\r\nimport nlp\r\n\r\nrouge = nlp.load_metric('rouge')\r\nwith open(\"test.pred.tokenized\") as p, open(\"test.gold.tokenized\") as g:\r\n    for lp, lg in zip(p, g):\r\n            rouge.add(lp, lg)\r\n```\r\n\r\nBut I meet following error :\r\n\r\n> pyarrow.lib.ArrowInvalid: Column 1 named references expected length 534 but got length 323\r\n\r\n---\r\n\r\nFull stack-trace :\r\n\r\n```\r\nTraceback (most recent call last):\r\n  File \"\", line 3, in \r\n  File \"\/home\/me\/.venv\/transformers\/lib\/python3.6\/site-packages\/nlp\/metric.py\", line 224, in add\r\n    self.writer.write_batch(batch)\r\n  File \"\/home\/me\/.venv\/transformers\/lib\/python3.6\/site-packages\/nlp\/arrow_writer.py\", line 148, in write_batch\r\n    pa_table: pa.Table = pa.Table.from_pydict(batch_examples, schema=self._schema)\r\n  File \"pyarrow\/table.pxi\", line 1550, in pyarrow.lib.Table.from_pydict\r\n  File \"pyarrow\/table.pxi\", line 1503, in pyarrow.lib.Table.from_arrays\r\n  File \"pyarrow\/public-api.pxi\", line 390, in pyarrow.lib.pyarrow_wrap_table\r\n  File \"pyarrow\/error.pxi\", line 85, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Column 1 named references expected length 534 but got length 323\r\n```\r\n\r\n(`nlp` installed from source)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/116\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/116\/timeline","performed_via_github_app":null}
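As a point of comparison for the ROUGE snippet in issue 116 above, here is a minimal sketch of the same computation with the present-day `evaluate` package rather than the `nlp.load_metric` API used in the issue; predictions and references are collected into two equal-length lists before scoring, and the file names are the ones mentioned in the report:

```python
import evaluate

rouge = evaluate.load("rouge")  # needs the `rouge_score` package installed

with open("test.pred.tokenized") as p, open("test.gold.tokenized") as g:
    predictions = [line.strip() for line in p]
    references = [line.strip() for line in g]

# compute expects one prediction per reference, so the two lists must be the same length.
results = rouge.compute(predictions=predictions, references=references)
print(results)
```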
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/115","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/115\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/115\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/115\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/115","id":618615855,"node_id":"MDU6SXNzdWU2MTg2MTU4NTU=","number":115,"title":"AttributeError: 'dict' object has no attribute 'info'","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api
.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2020-05-15T00:29:47Z","updated_at":"2020-05-17T13:11:00Z","closed_at":"2020-05-17T13:11:00Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I'm trying to access the information of CNN\/DM dataset :\r\n\r\n```python\r\ncnn_dm = nlp.load_dataset('cnn_dailymail')\r\nprint(cnn_dm.info)\r\n```\r\n\r\nreturns :\r\n\r\n> AttributeError: 'dict' object has no attribute 'info'","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/115\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/115\/timeline","performed_via_github_app":null}
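On the `.info` error in issue 115 above: when no `split` is passed, the loader returns a mapping from split name to dataset, so the metadata lives on each split rather than on the returned container. A small sketch assuming the current `datasets` API (the `"3.0.0"` config name is the one `cnn_dailymail` expects today):

```python
from datasets import load_dataset

cnn_dm = load_dataset("cnn_dailymail", "3.0.0")

# The top-level object is a DatasetDict; DatasetInfo is attached to each split.
info = cnn_dm["train"].info
print(info.description)
print(info.features)
```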
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/114","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/114\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/114\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/114\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/114","id":618611310,"node_id":"MDU6SXNzdWU2MTg2MTEzMTA=","number":114,"title":"Couldn't reach CNN\/DM dataset","user":{"login":"astariul","id":43774355,"node_id":"MDQ6VXNlcjQzNzc0MzU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43774355?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/astariul","html_url":"https:\/\/github.com\/astariul","followers_url":"https:\/\/api.github.com\/users\/astariul\/followers","following_url":"https:\/\/api.github.com\/users\/astariul\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/astariul\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/astariul\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/astariul\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/astariul\/orgs","repos_url":"https:\/\/api.github.com\/users\/astariul\/repos","events_url":"https:\/\/api.github.com\/users\/astariul\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/astariul\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-15T00:16:17Z","updated_at":"2020-05-15T00:19:52Z","closed_at":"2020-05-15T00:19:51Z","author_association":"NONE","active_lock_reason":null,"draft":null,"pull_request":null,"body":"I can't get CNN \/ DailyMail dataset.\r\n\r\n```python\r\nimport nlp\r\n\r\nassert \"cnn_dailymail\" in [dataset.id for dataset in nlp.list_datasets()]\r\ncnn_dm = nlp.load_dataset('cnn_dailymail')\r\n```\r\n\r\n[Colab notebook](https:\/\/colab.research.google.com\/drive\/1zQ3bYAVzm1h0mw0yWPqKAg_4EUlSx5Ex?usp=sharing)\r\n\r\ngives following error :\r\n\r\n```\r\nConnectionError: Couldn't reach https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/nlp\/cnn_dailymail\/cnn_dailymail.py\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/114\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/114\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/113","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/113\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/113\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/113\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/113","id":618590562,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4MjkxNjIx","number":113,"title":"Adding docstrings and some doc","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-14T23:14:41Z","updated_at":"2020-05-14T23:22:45Z","closed_at":"2020-05-14T23:22:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/113","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/113","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/113.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/113.patch","merged_at":"2020-05-14T23:22:44Z"},"body":"Some doc","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/113\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/113\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/112","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/112\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/112\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/112\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/112","id":618569195,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4Mjc0MTU4","number":112,"title":"Qa4mre - add dataset","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-14T22:17:51Z","updated_at":"2020-05-15T09:16:43Z","closed_at":"2020-05-15T09:16:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/112","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/112","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/112.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/112.patch","merged_at":"2020-05-15T09:16:42Z"},"body":"Added dummy data test only for the first config. Will do the rest later.\r\nI had to do add some minor hacks to an important function to make it work. \r\nThere might be a cleaner way to handle it - can you take a look @thomwolf ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/112\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/112\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/111","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/111\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/111\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/111\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/111","id":618528060,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4MjQwMjMy","number":111,"title":"[Clean-up] remove under construction datastes","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-14T20:52:13Z","updated_at":"2020-05-14T20:52:23Z","closed_at":"2020-05-14T20:52:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/111","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/111","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/111.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/111.patch","merged_at":"2020-05-14T20:52:22Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/111\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/111\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/110","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/110\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/110\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/110\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/110","id":618520325,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4MjMzODIy","number":110,"title":"fix reddit tifu dummy data","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-14T20:37:37Z","updated_at":"2020-05-14T20:40:14Z","closed_at":"2020-05-14T20:40:13Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/110","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/110","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/110.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/110.patch","merged_at":"2020-05-14T20:40:13Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/110\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/110\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/109","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/109\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/109\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/109\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/109","id":618508359,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4MjI0MDYw","number":109,"title":"[Reclor] fix reclor","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-14T20:16:26Z","updated_at":"2020-05-14T20:19:09Z","closed_at":"2020-05-14T20:19:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/109","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/109","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/109.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/109.patch","merged_at":"2020-05-14T20:19:08Z"},"body":"- That's probably one me. Could have made the manual data test more flexible. @mariamabarham ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/109\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/109\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/108","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/108\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/108\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/108\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/108","id":618386394,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4MTIzMzc3","number":108,"title":"convert can use manual dir as second argument","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-14T16:52:32Z","updated_at":"2020-05-14T16:52:43Z","closed_at":"2020-05-14T16:52:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/108","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/108","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/108.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/108.patch","merged_at":"2020-05-14T16:52:42Z"},"body":"@mariamabarham ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/108\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/108\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/107","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/107\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/107\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/107\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/107","id":618373045,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4MTEyNzcx","number":107,"title":"add writer_batch_size to GeneratorBasedBuilder","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-14T16:35:39Z","updated_at":"2020-05-14T16:50:30Z","closed_at":"2020-05-14T16:50:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/107","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/107","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/107.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/107.patch","merged_at":"2020-05-14T16:50:29Z"},"body":"You can now specify `writer_batch_size` in the builder arguments or directly in `load_dataset`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/107\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/107\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/106","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/106\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/106\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/106\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/106","id":618361418,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4MTAzMjM3","number":106,"title":"Add data dir test command","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-14T16:18:39Z","updated_at":"2020-05-14T16:49:11Z","closed_at":"2020-05-14T16:49:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/106","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/106","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/106.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/106.patch","merged_at":"2020-05-14T16:49:10Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/106\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/106\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/105","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/105\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/105\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/105\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/105","id":618345191,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4MDg5Njgz","number":105,"title":"[New structure on AWS] Adapt paths","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-14T15:55:57Z","updated_at":"2020-05-14T15:56:28Z","closed_at":"2020-05-14T15:56:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/105","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/105","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/105.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/105.patch","merged_at":"2020-05-14T15:56:27Z"},"body":"Some small changes so that we have the correct paths. @julien-c ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/105\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/105\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/104","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/104\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/104\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/104\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/104","id":618277081,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE4MDMzOTY0","number":104,"title":"Add trivia_q","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-14T14:27:19Z","updated_at":"2020-07-12T05:34:20Z","closed_at":"2020-05-14T20:23:32Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/104","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/104","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/104.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/104.patch","merged_at":"2020-05-14T20:23:32Z"},"body":"Currently tested only for one config to pass tests. Needs to add more dummy data later.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/104\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/104\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/103","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/103\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/103\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/103\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/103","id":618233637,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3OTk5MDIy","number":103,"title":"[Manual downloads] add logic proposal for manual downloads and add wikihow","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-14T13:30:36Z","updated_at":"2020-05-14T14:27:41Z","closed_at":"2020-05-14T14:27:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/103","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/103","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/103.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/103.patch","merged_at":"2020-05-14T14:27:40Z"},"body":"Wikihow is an example that needs to manually download two files as stated in: https:\/\/github.com\/mahnazkoupaee\/WikiHow-Dataset. \r\n\r\nThe user can then store these files under a hard-coded name: `wikihowAll.csv` and `wikihowSep.csv` in this case in a directory of his choice, e.g. `~\/wikihow\/manual_dir`.\r\n\r\nThe dataset can then be loaded via:\r\n\r\n```python\r\nimport nlp\r\nnlp.load_dataset(\"wikihow\", data_dir=\"~\/wikihow\/manual_dir\")\r\n```\r\n\r\nI added\/changed so that there are explicit error messages when using manually downloaded files.\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/103\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/103\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/102","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/102\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/102\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/102\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/102","id":618231216,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3OTk3MDQz","number":102,"title":"Run save infos","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-14T13:27:26Z","updated_at":"2020-05-14T15:43:04Z","closed_at":"2020-05-14T15:43:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/102","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/102","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/102.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/102.patch","merged_at":"2020-05-14T15:43:03Z"},"body":"I replaced the old checksum file with the new `dataset_infos.json` by running the script on almost all the datasets we have. The only one that is still running on my side is the cornell dialog","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/102\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/102\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/101","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/101\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/101\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/101\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/101","id":618111651,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3ODk5OTQ2","number":101,"title":"[Reddit] add reddit","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-14T10:25:02Z","updated_at":"2020-05-14T10:27:25Z","closed_at":"2020-05-14T10:27:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/101","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/101","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/101.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/101.patch","merged_at":"2020-05-14T10:27:24Z"},"body":"- Everything worked fine @mariamabarham. Made my computer nearly crash, but all seems to be working :-) ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/101\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/101\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/100","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/100\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/100\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/100\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/100","id":618081602,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3ODc1MjE2","number":100,"title":"Add per type scores in seqeval metric","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-05-14T09:37:52Z","updated_at":"2020-05-14T23:21:35Z","closed_at":"2020-05-14T23:21:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/100","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/100","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/100.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/100.patch","merged_at":"2020-05-14T23:21:34Z"},"body":"This PR add a bit more detail in the seqeval metric. Now the usage and output are:\r\n\r\n```python\r\nimport nlp\r\nmet = nlp.load_metric('metrics\/seqeval')\r\nreferences = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]\r\npredictions =  [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]\r\nmet.compute(predictions, references)\r\n\r\n#Output: {'PER': {'precision': 1.0, 'recall': 1.0, 'f1': 1.0, 'number': 1}, 'MISC': {'precision': 0.0, 'recall': 0.0, 'f1': 0, 'number': 1}, 'overall_precision': 0.5, 'overall_recall': 0.5, 'overall_f1': 0.5, 'overall_accuracy': 0.8}\r\n```\r\n\r\nIt is also possible to compute scores for non IOB notations, POS tagging for example hasn't this kind of notation. 
Add `suffix` parameter:\r\n\r\n```python\r\nimport nlp\r\nmet = nlp.load_metric('metrics\/seqeval')\r\nreferences = [['O', 'O', 'O', 'MISC', 'MISC', 'MISC', 'O'], ['PER', 'PER', 'O']]\r\npredictions =  [['O', 'O', 'MISC', 'MISC', 'MISC', 'MISC', 'O'], ['PER', 'PER', 'O']]\r\nmet.compute(predictions, references, metrics_kwargs={\"suffix\": True})\r\n\r\n#Output: {'PER': {'precision': 1.0, 'recall': 1.0, 'f1': 1.0, 'number': 1}, 'MISC': {'precision': 0.0, 'recall': 0.0, 'f1': 0, 'number': 1}, 'overall_precision': 0.5, 'overall_recall': 0.5, 'overall_f1': 0.5, 'overall_accuracy': 0.9}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/100\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/100\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/99","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/99\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/99\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/99\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/99","id":618026700,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3ODMxNjky","number":99,"title":"[Cmrc 2018] fix cmrc2018","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-14T08:22:03Z","updated_at":"2020-05-14T08:49:42Z","closed_at":"2020-05-14T08:49:41Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/99","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/99","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/99.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/99.patch","merged_at":"2020-05-14T08:49:41Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/99\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/99\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/98","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/98\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/98\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/98\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/98","id":617957739,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3Nzc3NDcy","number":98,"title":"Webis tl-dr","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2020-05-14T06:22:18Z","updated_at":"2020-09-03T10:00:21Z","closed_at":"2020-05-14T20:54:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/98","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/98","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/98.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/98.patch","merged_at":"2020-05-14T20:54:15Z"},"body":"Add the Webid TL:DR dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/98\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/98\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/97","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/97\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/97\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/97\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/97","id":617809431,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3NjU4MDcy","number":97,"title":"[Csv] add tests for csv dataset script","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-13T23:06:11Z","updated_at":"2020-05-13T23:23:16Z","closed_at":"2020-05-13T23:23:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/97","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/97","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/97.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/97.patch","merged_at":"2020-05-13T23:23:15Z"},"body":"Adds dummy data tests for csv.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/97\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/97\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/96","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/96\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/96\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/96\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/96","id":617739521,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3NjAwMjY4","number":96,"title":"lm1b","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-13T20:38:44Z","updated_at":"2020-05-14T14:13:30Z","closed_at":"2020-05-14T14:13:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/96","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/96","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/96.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/96.patch","merged_at":"2020-05-14T14:13:29Z"},"body":"Add lm1b dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/96\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/96\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/95","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/95\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/95\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/95\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/95","id":617703037,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3NTY5NzA4","number":95,"title":"Replace checksums files by Dataset infos json","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-13T19:36:16Z","updated_at":"2020-05-14T08:58:43Z","closed_at":"2020-05-14T08:58:42Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/95","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/95","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/95.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/95.patch","merged_at":"2020-05-14T08:58:42Z"},"body":"### Better verifications when loading a dataset\r\n\r\nI replaced the `urls_checksums` directory that used to contain `checksums.txt` and `cached_sizes.txt`, by a single file `dataset_infos.json`. It's just a dict `config_name` -> `DatasetInfo`.\r\n\r\nIt simplifies and improves how verifications of checksums and splits sizes are done, as they're all stored in `DatasetInfo` (one per config). Also, having already access to `DatasetInfo` enables to check disk space before running `download_and_prepare` for a given config.\r\n\r\nThe dataset infos json file is user readable, you can take a look at the squad one that I generated in this PR.\r\n\r\n### Renaming\r\n\r\nAccording to these changes, I did some renaming:\r\n`save_checksums` -> `save_infos`\r\n`ignore_checksums` -> `ignore_verifications`\r\n\r\nfor example, when you are creating a dataset you have to run\r\n```nlp-cli test path\/to\/my\/dataset --save_infos --all_configs```\r\ninstead of\r\n```nlp-cli test path\/to\/my\/dataset --save_checksums --all_configs```\r\n\r\n### And now, the fun part\r\n\r\nWe'll have to rerun the `nlp-cli test ... 
--save_infos --all_configs` for all the datasets\r\n\r\n-----------------\r\n\r\nfeedback appreciated !","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/95\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/95\/timeline","performed_via_github_app":null}
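PR #95 above describes `dataset_infos.json` as a dict mapping `config_name` to a serialized `DatasetInfo`. A hedged sketch for inspecting such a file follows; the path and the assumption that the values are plain JSON objects are illustrative only:

```python
import json

# Hypothetical local path to a generated dataset_infos.json (e.g. the squad one mentioned in the PR).
with open("datasets/squad/dataset_infos.json") as f:
    infos = json.load(f)  # dict: config_name -> serialized DatasetInfo

for config_name, info in infos.items():
    # The exact fields depend on how DatasetInfo is serialized; print the keys to inspect them.
    print(config_name, sorted(info.keys()))
```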
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/94","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/94\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/94\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/94\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/94","id":617571340,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3NDYyMTIw","number":94,"title":"Librispeech","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-13T16:04:14Z","updated_at":"2020-05-13T21:29:03Z","closed_at":"2020-05-13T21:29:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/94","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/94","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/94.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/94.patch","merged_at":"2020-05-13T21:29:02Z"},"body":"Add librispeech dataset and remove some useless content.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/94\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/94\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/93","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/93\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/93\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/93\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/93","id":617522029,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3NDIxODUy","number":93,"title":"Cleanup notebooks and various fixes","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-13T14:58:58Z","updated_at":"2020-05-13T15:01:48Z","closed_at":"2020-05-13T15:01:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/93","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/93","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/93.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/93.patch","merged_at":"2020-05-13T15:01:47Z"},"body":"Fixes on dataset (more flexible) metrics (fix) and general clean ups","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/93\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/93\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/92","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/92\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/92\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/92\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/92","id":617341505,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3Mjc1ODky","number":92,"title":"[WIP] add wmt14","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-13T10:42:03Z","updated_at":"2020-05-16T11:17:38Z","closed_at":"2020-05-16T11:17:37Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/92","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/92","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/92.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/92.patch","merged_at":"2020-05-16T11:17:37Z"},"body":"WMT14 takes forever to download :-\/ \r\n\r\n- WMT is the first dataset that uses an abstract class IMO, so I had to modify the `load_dataset_module` a bit.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/92\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/92\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/91","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/91\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/91\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/91\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/91","id":617339484,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3Mjc0MjA0","number":91,"title":"[Paracrawl] add paracrawl","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-13T10:39:00Z","updated_at":"2020-05-13T10:40:15Z","closed_at":"2020-05-13T10:40:14Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/91","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/91","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/91.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/91.patch","merged_at":"2020-05-13T10:40:14Z"},"body":"- Huge dataset - took ~1h to download\r\n- Also this PR reformats all dataset scripts and adds `datasets` to `make style`","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/91\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/91\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/90","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/90\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/90\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/90\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/90","id":617311877,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3MjUxODE0","number":90,"title":"Add download gg drive","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-13T09:56:02Z","updated_at":"2020-05-13T12:46:28Z","closed_at":"2020-05-13T10:05:31Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/90","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/90","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/90.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/90.patch","merged_at":"2020-05-13T10:05:31Z"},"body":"We can now add datasets that download from google drive","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/90\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/90\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/89","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/89\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/89\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/89\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/89","id":617295069,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3MjM4MjU4","number":89,"title":"Add list and inspect methods - cleanup hf_api","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-13T09:30:15Z","updated_at":"2020-05-13T14:05:00Z","closed_at":"2020-05-13T09:33:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/89","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/89","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/89.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/89.patch","merged_at":"2020-05-13T09:33:10Z"},"body":"Add a bunch of methods to easily list and inspect the processing scripts up-loaded on S3:\r\n```python\r\nnlp.list_datasets()\r\nnlp.list_metrics()\r\n# Copy and prepare the scripts at `local_path` for easy inspection\/modification.\r\nnlp.inspect_dataset(path, local_path) \r\n# Copy and prepare the scripts at `local_path` for easy inspection\/modification.\r\nnlp.inspect_metric(path, local_path) \r\n```\r\n\r\nAlso clean up the `HfAPI` to use `dataclasses` for better user-experience","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/89\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/89\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/88","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/88\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/88\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/88\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/88","id":617284664,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3MjI5ODQw","number":88,"title":"Add wiki40b","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-13T09:16:01Z","updated_at":"2020-05-13T12:31:55Z","closed_at":"2020-05-13T12:31:54Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/88","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/88","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/88.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/88.patch","merged_at":"2020-05-13T12:31:54Z"},"body":"This one is a beam dataset that downloads files using tensorflow.\r\nI tested it on a small config and it works fine","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/88\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/88\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/87","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/87\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/87\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/87\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/87","id":617267118,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3MjE1NzA0","number":87,"title":"Add Flores","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-13T08:51:29Z","updated_at":"2020-05-13T09:23:34Z","closed_at":"2020-05-13T09:23:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/87","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/87","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/87.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/87.patch","merged_at":"2020-05-13T09:23:33Z"},"body":"Beautiful language for sure!","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/87\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/87\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/86","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/86\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/86\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/86\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/86","id":617260972,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3MjEwNzY2","number":86,"title":"[Load => load_dataset] change naming","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-13T08:43:00Z","updated_at":"2020-05-13T08:50:58Z","closed_at":"2020-05-13T08:50:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/86","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/86","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/86.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/86.patch","merged_at":"2020-05-13T08:50:57Z"},"body":"Rename leftovers @thomwolf ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/86\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/86\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/85","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/85\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/85\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/85\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/85","id":617253428,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3MjA0ODA4","number":85,"title":"Add boolq","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-13T08:32:27Z","updated_at":"2020-05-13T09:09:39Z","closed_at":"2020-05-13T09:09:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/85","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/85","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/85.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/85.patch","merged_at":"2020-05-13T09:09:38Z"},"body":"I just added the dummy data for this dataset.\r\nThis one was uses `tf.io.gfile.copy` to download the data but I added the support for custom download in the mock_download_manager. I also had to add a `tensorflow` dependency for tests.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/85\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/85\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/84","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/84\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/84\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/84\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/84","id":617249815,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE3MjAxODcz","number":84,"title":"[TedHrLr] add left dummy data","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-13T08:27:20Z","updated_at":"2020-05-13T08:29:22Z","closed_at":"2020-05-13T08:29:21Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/84","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/84","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/84.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/84.patch","merged_at":"2020-05-13T08:29:21Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/84\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/84\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/83","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/83\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/83\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/83\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/83","id":616863601,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2ODkyOTUz","number":83,"title":"New datasets","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-12T18:22:27Z","updated_at":"2020-05-12T18:22:47Z","closed_at":"2020-05-12T18:22:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/83","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/83","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/83.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/83.patch","merged_at":"2020-05-12T18:22:45Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/83\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/83\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/82","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/82\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/82\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/82\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/82","id":616805194,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2ODQ1Njc5","number":82,"title":"[Datasets] add ted_hrlr","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-12T16:46:50Z","updated_at":"2020-05-13T07:52:54Z","closed_at":"2020-05-13T07:52:53Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/82","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/82","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/82.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/82.patch","merged_at":"2020-05-13T07:52:52Z"},"body":"@thomwolf - After looking at `xnli` I think it's better to leave the translation features and add a `translation` key to make them work in our framework. \r\n\r\nThe result looks like this:\r\n![Screenshot from 2020-05-12 18-34-43](https:\/\/user-images.githubusercontent.com\/23423619\/81721933-ee1faf00-9480-11ea-9e95-d6557cbd0ce0.png)\r\n\r\nyou can see that each split has a `translation` key which value is the nlp.features.Translation object. \r\n\r\nThat's a simple change. If it's ok for you, I will add dummy data for the other configs and treat the other translation scripts in the same way.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/82\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/82\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/81","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/81\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/81\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/81\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/81","id":616793010,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2ODM1NzE1","number":81,"title":"add tests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-12T16:28:19Z","updated_at":"2020-05-13T07:43:57Z","closed_at":"2020-05-13T07:43:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/81","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/81","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/81.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/81.patch","merged_at":"2020-05-13T07:43:56Z"},"body":"Tests for py_utils functions and for the BaseReader used to read from arrow and parquet.\r\nI also removed unused utils functions.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/81\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/81\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/80","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/80\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/80\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/80\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/80","id":616786803,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2ODMwNjk3","number":80,"title":"Add nbytes + nexamples check","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-12T16:18:43Z","updated_at":"2020-05-13T07:52:34Z","closed_at":"2020-05-13T07:52:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/80","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/80","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/80.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/80.patch","merged_at":"2020-05-13T07:52:33Z"},"body":"### Save size and number of examples\r\nNow when you do `save_checksums`, it also create `cached_sizes.txt` right next to the checksum file.\r\nThis new file stores the bytes sizes and the number of examples of each split that has been prepared and stored in the cache. Example:\r\n\r\n```\r\n# Cached sizes:   \r\nhansards\/house\/1.0.0\/test 22906629 122290\r\nhansards\/house\/1.0.0\/train 191459584 947969\r\nhansards\/senate\/1.0.0\/test 5711686 25553\r\nhansards\/senate\/1.0.0\/train 40324278 182135\r\n```\r\n\r\n### Check processing output \r\n\r\nIf there is a `caches_sizes.txt`, then each time we run `download_and_prepare` it will make sure that the sizes match. You can set `ignore_checksums=True` if you don't want that to happen.\r\n\r\n### Fill Dataset Info\r\n\r\nAll the split infos and the checksums are now stored correctly in DatasetInfo after `download_and_prepare`\r\n\r\n### Check space on disk before running `download_and_prepare`\r\n\r\nCheck if the space is lower than the sum of the sizes of the files in `checksums.txt` and `cached_files.txt`. 
This is not ideal though as it considers the files for all configs.\r\n\r\nTODO:\r\nA better way to do it would be to have save the `DatasetInfo` instead of the `checksums.txt` and `cached_sizes.txt`, in order to have one file per dataset config (and therefore consider only the sizes of the files for one config and not all of them). It can also be the occasion to factorize all the `download_and_prepare` verifications. Maybe next PR ?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/80\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/80\/timeline","performed_via_github_app":null}
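The record above defines the `cached_sizes.txt` layout: one `<dataset>/<config>/<version>/<split>` key followed by the byte size and the number of examples. A minimal sketch of reading that format, assuming a hypothetical helper name and return layout:

```python
def read_cached_sizes(path="cached_sizes.txt"):
    """Parse lines like 'hansards/house/1.0.0/test 22906629 122290' into a dict."""
    sizes = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith("#"):  # skip the "# Cached sizes:" header
                continue
            key, num_bytes, num_examples = line.split()
            sizes[key] = (int(num_bytes), int(num_examples))
    return sizes
```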
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/79","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/79\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/79\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/79\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/79","id":616785613,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2ODI5NzMy","number":79,"title":"[Convert] add new pattern","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-12T16:16:51Z","updated_at":"2020-05-12T16:17:10Z","closed_at":"2020-05-12T16:17:09Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/79","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/79","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/79.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/79.patch","merged_at":"2020-05-12T16:17:09Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/79\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/79\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/78","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/78\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/78\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/78\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/78","id":616774275,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2ODIwNzU5","number":78,"title":"[Tests] skip beam dataset tests for now","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-12T16:00:58Z","updated_at":"2020-05-12T16:16:24Z","closed_at":"2020-05-12T16:16:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/78","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/78","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/78.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/78.patch","merged_at":"2020-05-12T16:16:22Z"},"body":"For now we will skip tests for Beam Datasets","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/78\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/78\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/77","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/77\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/77\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/77\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/77","id":616674601,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2NzQwMjAz","number":77,"title":"New datasets","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-12T13:51:59Z","updated_at":"2020-05-12T14:02:16Z","closed_at":"2020-05-12T14:02:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/77","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/77","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/77.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/77.patch","merged_at":"2020-05-12T14:02:15Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/77\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/77\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/76","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/76\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/76\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/76\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/76","id":616579228,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2NjYyMTk2","number":76,"title":"pin flake 8","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-12T11:25:29Z","updated_at":"2020-05-12T11:27:35Z","closed_at":"2020-05-12T11:27:34Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/76","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/76","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/76.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/76.patch","merged_at":"2020-05-12T11:27:34Z"},"body":"Flake 8's new version does not like our format. Pinning the version for now.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/76\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/76\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/75","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/75\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/75\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/75\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/75","id":616520163,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2NjE0MzU1","number":75,"title":"WIP adding metrics","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-12T09:52:00Z","updated_at":"2020-05-13T07:44:12Z","closed_at":"2020-05-13T07:44:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/75","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/75","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/75.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/75.patch","merged_at":"2020-05-13T07:44:10Z"},"body":"Adding the following metrics as identified by @mariamabarham:\r\n\r\n1. BLEU:  BiLingual Evaluation Understudy: https:\/\/github.com\/tensorflow\/nmt\/blob\/master\/nmt\/scripts\/bleu.py,  https:\/\/github.com\/chakki-works\/sumeval\/blob\/master\/sumeval\/metrics\/bleu.py (multilingual)\r\n2. GLEU:  Google-BLEU:  https:\/\/github.com\/cnap\/gec-ranking\/blob\/master\/scripts\/compute_gleu\r\n3. Sacrebleu: https:\/\/pypi.org\/project\/sacrebleu\/1.4.8\/ (pypi package), https:\/\/github.com\/mjpost\/sacrebleu (github implementation)\r\n4. ROUGE: Recall-Oriented Understudy for Gisting Evaluation: https:\/\/github.com\/google-research\/google-research\/tree\/master\/rouge, https:\/\/github.com\/chakki-works\/sumeval\/blob\/master\/sumeval\/metrics\/rouge.py (multilingual)\r\n5. Seqeval: https:\/\/github.com\/chakki-works\/seqeval (github implementation), https:\/\/pypi.org\/project\/seqeval\/0.0.12\/ (pypi package)\r\n6. Coval:  coreference evaluation package for the CoNLL and ARRAU datasets https:\/\/github.com\/ns-moosavi\/coval\r\n7. SQuAD v1 evaluation script\r\n8. SQuAD V2 evaluation script: https:\/\/worksheets.codalab.org\/rest\/bundles\/0x6b567e1cf2e041ec80d7098f031c5c9e\/contents\/blob\/\r\n9. GLUE\r\n10. XNLI\r\n\r\n\r\nNot now:\r\n1. 
Perplexity: https:\/\/github.com\/allenai\/allennlp\/blob\/master\/allennlp\/training\/metrics\/perplexity.py\r\n2. Spearman: https:\/\/github.com\/allenai\/allennlp\/blob\/master\/allennlp\/training\/metrics\/spearman_correlation.py\r\n3. F1_measure: https:\/\/github.com\/allenai\/allennlp\/blob\/master\/allennlp\/training\/metrics\/f1_measure.py\r\n4. Pearson_corelation: https:\/\/github.com\/allenai\/allennlp\/blob\/master\/allennlp\/training\/metrics\/pearson_correlation.py\r\n5. AUC: https:\/\/github.com\/allenai\/allennlp\/blob\/master\/allennlp\/training\/metrics\/auc.py \r\n6. Entropy: https:\/\/github.com\/allenai\/allennlp\/blob\/master\/allennlp\/training\/metrics\/entropy.py","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/75\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/75\/timeline","performed_via_github_app":null}
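Item 3 in the metrics list above points at the sacrebleu package. A standalone sketch of the corpus-level score it computes, independent of whatever wrapper this PR adds; the example sentences are invented:

```python
import sacrebleu

hypotheses = ["the cat sat on the mat"]
# One reference stream, containing one reference per hypothesis.
references = [["the cat is sitting on the mat"]]

bleu = sacrebleu.corpus_bleu(hypotheses, references)
print(round(bleu.score, 2))
```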
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/74","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/74\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/74\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/74\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/74","id":616511101,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2NjA3MDcy","number":74,"title":"fix overflow check","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-12T09:38:01Z","updated_at":"2020-05-12T10:04:39Z","closed_at":"2020-05-12T10:04:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/74","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/74","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/74.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/74.patch","merged_at":"2020-05-12T10:04:37Z"},"body":"I did some tests and unfortunately the test\r\n```\r\npa_array.nbytes > MAX_BATCH_BYTES\r\n```\r\ndoesn't work. Indeed for a StructArray, `nbytes` can be less 2GB even if there is an overflow (it loops...).\r\n\r\nI don't think we can do a proper overflow test for the limit of 2GB...\r\n\r\nFor now I replaced it with a sanity check on the first element.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/74\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/74\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/73","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/73\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/73\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/73\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/73","id":616417845,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2NTMyMTg1","number":73,"title":"JSON script","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-05-12T07:11:22Z","updated_at":"2020-05-18T06:50:37Z","closed_at":"2020-05-18T06:50:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/73","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/73","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/73.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/73.patch","merged_at":"2020-05-18T06:50:36Z"},"body":"Add a JSONS script to read JSON datasets from files.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/73\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/73\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/72","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/72\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/72\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/72\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/72","id":616225010,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2Mzc4Mjg4","number":72,"title":"[README dummy data tests] README to better understand how the dummy data structure works","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-11T22:19:03Z","updated_at":"2020-05-11T22:26:03Z","closed_at":"2020-05-11T22:26:01Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/72","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/72","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/72.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/72.patch","merged_at":"2020-05-11T22:26:01Z"},"body":"In this PR a README.md is added to tests to shine more light on how the dummy data structure works. I try to explain the different possible cases. IMO the best way to understand the logic is to checkout the dummy data structure of the different datasets I mention in the README.md since those are the \"edge cases\". \r\n\r\n@mariamabarham @thomwolf @lhoestq @jplu - I'd be happy to checkout the dummy data structure and get some feedback on possible improvements.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/72\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/72\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/71","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/71\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/71\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/71\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/71","id":615942180,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE2MTUxODM4","number":71,"title":"Fix arrow writer for big datasets using writer_batch_size","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-11T14:45:36Z","updated_at":"2020-05-11T20:09:47Z","closed_at":"2020-05-11T20:00:38Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/71","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/71","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/71.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/71.patch","merged_at":"2020-05-11T20:00:38Z"},"body":"This PR fixes Yacine's bug.\r\nAccording to [this](https:\/\/github.com\/apache\/arrow\/blob\/master\/docs\/source\/cpp\/arrays.rst#size-limitations-and-recommendations), it is not recommended to have pyarrow arrays bigger than 2Go.\r\n\r\nTherefore I set a default batch size of 100 000 examples per batch. In general it shouldn't exceed 2Go. If it does, I reduce the batch_size on the fly, and I notify the user with a warning.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/71\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/71\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/70","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/70\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/70\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/70\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/70","id":615679102,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE1OTM3NDgw","number":70,"title":"adding RACE, QASC, Super_glue and Tiny_shakespear datasets","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-11T08:07:49Z","updated_at":"2020-05-12T13:21:52Z","closed_at":"2020-05-12T13:21:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/70","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/70","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/70.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/70.patch","merged_at":"2020-05-12T13:21:51Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/70\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/70\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/69","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/69\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/69\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/69\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/69","id":615450534,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE1NzYyNTQ4","number":69,"title":"fix cache dir in builder tests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-10T18:39:21Z","updated_at":"2020-05-11T07:19:30Z","closed_at":"2020-05-11T07:19:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/69","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/69","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/69.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/69.patch","merged_at":"2020-05-11T07:19:28Z"},"body":"minor fix","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/69\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/69\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/68","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/68\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/68\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/68\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/68","id":614882655,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE1MzQ3NTgw","number":68,"title":"[CSV] re-add csv","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-08T17:38:29Z","updated_at":"2020-05-08T17:40:48Z","closed_at":"2020-05-08T17:40:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/68","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/68","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/68.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/68.patch","merged_at":"2020-05-08T17:40:46Z"},"body":"Re-adding csv under the datasets under construction to keep circle ci happy - will have to see how to include it in the tests.\r\n\r\n@lhoestq noticed that I accidently deleted it in https:\/\/github.com\/huggingface\/nlp\/pull\/63#discussion_r422263729.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/68\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/68\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/67","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/67\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/67\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/67\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/67","id":614798483,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE1Mjc5NjI0","number":67,"title":"[Tests] Test files locally","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-08T15:02:43Z","updated_at":"2020-05-08T19:50:47Z","closed_at":"2020-05-08T15:17:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/67","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/67","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/67.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/67.patch","merged_at":"2020-05-08T15:17:00Z"},"body":"This PR adds a `aws` and a `local` decorator to the tests so that tests now run on the local datasets. \r\n\r\nBy default, the `aws` is deactivated and `local` is activated and `slow` is deactivated, so that only 1 test per dataset runs on circle ci. \r\n\r\n**When local is activated all folders in `.\/datasets` are tested.**\r\n\r\n**Important** When adding a dataset, we should no longer upload it to AWS. The steps are:\r\n1. Open a PR\r\n2. Add a dataset as described in `datasets\/README.md`\r\n3. If all tests pass, push to master\r\n\r\nCurrently we have 49 functional datasets in our code base. 
\r\n\r\nWe have 6 datasets \"under-construction\" that don't pass the tests - so I put them in a folder \"datasets_under_construction\" - it would be nice to open a PR to fix them and put them in the `datasets` folder.\r\n\r\n**Important** when running tests locally, the datasets are cached so to rerun them delete your local cache via:\r\n`rm -r ~\/.cache\/huggingface\/datasets\/*` \r\n\r\n@thomwolf @mariamabarham @lhoestq ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/67\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/67\/timeline","performed_via_github_app":null}
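PR #67 above describes gating dataset tests with `aws`, `local`, and `slow` decorators (local on by default, aws and slow off). The sketch below is only an illustration of how such environment-driven skip decorators can be written with `unittest`; the environment-variable names and defaults are assumptions for illustration, not the repository's actual implementation.

```python
# Illustrative sketch of environment-driven skip decorators in the spirit of
# the `aws` / `local` / `slow` markers described in PR #67. The variable names
# and defaults are assumptions, not the repository's actual code.
import os
import unittest


def _flag(name: str, default: str) -> bool:
    return os.environ.get(name, default) == "1"


def local(test_case):
    """Runs by default; skipped when RUN_LOCAL=0 (assumed convention)."""
    return test_case if _flag("RUN_LOCAL", "1") else unittest.skip("local tests disabled")(test_case)


def aws(test_case):
    """Skipped by default; enabled with RUN_AWS=1 (assumed convention)."""
    return test_case if _flag("RUN_AWS", "0") else unittest.skip("aws tests disabled")(test_case)


def slow(test_case):
    """Skipped by default; enabled with RUN_SLOW=1 (assumed convention)."""
    return test_case if _flag("RUN_SLOW", "0") else unittest.skip("slow tests disabled")(test_case)
```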
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/66","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/66\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/66\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/66\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/66","id":614748552,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE1MjM5Njgy","number":66,"title":"[Datasets] ReadME","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-08T13:37:43Z","updated_at":"2020-05-08T13:39:23Z","closed_at":"2020-05-08T13:39:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/66","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/66","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/66.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/66.patch","merged_at":"2020-05-08T13:39:22Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/66\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/66\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/65","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/65\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/65\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/65\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/65","id":614746516,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE1MjM4MDEw","number":65,"title":"fix math dataset and xcopa","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-08T13:33:55Z","updated_at":"2020-05-08T13:35:41Z","closed_at":"2020-05-08T13:35:40Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/65","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/65","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/65.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/65.patch","merged_at":"2020-05-08T13:35:40Z"},"body":"- fixes math dataset and xcopa, uploaded both of the to S3","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/65\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/65\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/64","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/64\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/64\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/64\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/64","id":614737057,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE1MjMwMjYy","number":64,"title":"[Datasets] Make master ready for datasets adding","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-08T13:17:00Z","updated_at":"2020-05-08T13:17:31Z","closed_at":"2020-05-08T13:17:30Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/64","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/64","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/64.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/64.patch","merged_at":"2020-05-08T13:17:30Z"},"body":"Add all relevant files so that datasets can now be added on master","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/64\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/64\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/63","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/63\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/63\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/63\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/63","id":614666365,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE1MTczODU5","number":63,"title":"[Dataset scripts] add all datasets scripts","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-08T10:50:15Z","updated_at":"2020-05-08T17:39:22Z","closed_at":"2020-05-08T11:34:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/63","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/63","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/63.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/63.patch","merged_at":"2020-05-08T11:34:00Z"},"body":"As mentioned, we can have the canonical datasets in the master. For now I also want to include all the data as present on S3 to make the synchronization easier when uploading new datastes. \r\n\r\n@mariamabarham @lhoestq @thomwolf - what do you think? \r\n\r\nIf this is ok for you, I can sync up the master with the `add_dataset` branch: https:\/\/github.com\/huggingface\/nlp\/pull\/37 so that master is up to date. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/63\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/63\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/62","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/62\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/62\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/62\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/62","id":614630830,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE1MTQ1NDAx","number":62,"title":"[Cached Path] Better error message","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-08T09:39:47Z","updated_at":"2020-05-08T09:45:47Z","closed_at":"2020-05-08T09:45:47Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/62","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/62","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/62.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/62.patch","merged_at":null},"body":"IMO returning `None` in this function only leads to confusion and is never helpful.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/62\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/62\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/61","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/61\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/61\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/61\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/61","id":614607474,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE1MTI3MTU4","number":61,"title":"[Load] rename setup_module to prepare_module","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-08T08:54:22Z","updated_at":"2020-05-08T08:56:32Z","closed_at":"2020-05-08T08:56:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/61","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/61","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/61.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/61.patch","merged_at":"2020-05-08T08:56:16Z"},"body":"rename setup_module to prepare_module due to issues with pytests `setup_module` function.\r\nSee: PR #59. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/61\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/61\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/60","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/60\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/60\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/60\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/60","id":614372553,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE0OTQyNjEy","number":60,"title":"Update to simplify some datasets conversion","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-05-07T22:02:24Z","updated_at":"2020-05-08T10:38:32Z","closed_at":"2020-05-08T10:18:24Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/60","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/60","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/60.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/60.patch","merged_at":"2020-05-08T10:18:24Z"},"body":"This PR updates the encoding of `Values` like `integers`, `boolean` and `float` to use python casting and avoid having to cast in the dataset scripts, as mentioned here: https:\/\/github.com\/huggingface\/nlp\/pull\/37#discussion_r420176626\r\n\r\nWe could also change (not included in this PR yet):\r\n- `supervized_keys` to make them a NamedTuple instead of a dataclass, and\r\n- handle specifically the `Translation` features.\r\nas mentioned here: https:\/\/github.com\/huggingface\/nlp\/pull\/37#discussion_r421740236\r\n\r\n@patrickvonplaten @mariamabarham tell me if you want these two last changes as well.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/60\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/60\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/59","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/59\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/59\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/59\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/59","id":614366045,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE0OTM3NTgx","number":59,"title":"Fix tests","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-05-07T21:48:09Z","updated_at":"2020-05-08T10:57:57Z","closed_at":"2020-05-08T10:46:51Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/59","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/59","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/59.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/59.patch","merged_at":"2020-05-08T10:46:51Z"},"body":"@patrickvonplaten I've broken a bit the tests with #25 while simplifying and re-organizing the `load.py` and `download_manager.py` scripts.\r\n\r\nI'm trying to fix them here but I have a weird error, do you think you can have a look?\r\n```bash\r\n(datasets) MacBook-Pro-de-Thomas:datasets thomwolf$ python -m pytest -sv .\/tests\/test_dataset_common.py::DatasetTest::test_builder_class_snli\r\n============================================================================= test session starts =============================================================================\r\nplatform darwin -- Python 3.7.7, pytest-5.4.1, py-1.8.1, pluggy-0.13.1 -- \/Users\/thomwolf\/miniconda2\/envs\/datasets\/bin\/python\r\ncachedir: .pytest_cache\r\nrootdir: \/Users\/thomwolf\/Documents\/GitHub\/datasets\r\nplugins: xdist-1.31.0, forked-1.1.3\r\ncollected 1 item                                                                                                                                                              \r\n\r\ntests\/test_dataset_common.py::DatasetTest::test_builder_class_snli ERROR\r\n\r\n=================================================================================== ERRORS ====================================================================================\r\n____________________________________________________________ ERROR at setup of 
DatasetTest.test_builder_class_snli ____________________________________________________________\r\n\r\nfile_path = \r\ndownload_config = DownloadConfig(cache_dir=None, force_download=False, resume_download=False, local_files_only=False, proxies=None, user_agent=None, extract_compressed_file=True, force_extract=True)\r\ndownload_kwargs = {}\r\n\r\n    def setup_module(file_path: str, download_config: Optional[DownloadConfig] = None, **download_kwargs,) -> DatasetBuilder:\r\n        r\"\"\"\r\n            Download\/extract\/cache a dataset to add to the lib from a path or url which can be:\r\n                - a path to a local directory containing the dataset processing python script\r\n                - an url to a S3 directory with a dataset processing python script\r\n    \r\n            Dataset codes are cached inside the lib to allow easy import (avoid ugly sys.path tweaks)\r\n            and using cloudpickle (among other things).\r\n    \r\n            Return: tuple of\r\n                the unique id associated to the dataset\r\n                the local path to the dataset\r\n        \"\"\"\r\n        if download_config is None:\r\n            download_config = DownloadConfig(**download_kwargs)\r\n        download_config.extract_compressed_file = True\r\n        download_config.force_extract = True\r\n    \r\n>       name = list(filter(lambda x: x, file_path.split(\"\/\")))[-1] + \".py\"\r\nE       AttributeError: module 'tests.test_dataset_common' has no attribute 'split'\r\n\r\nsrc\/nlp\/load.py:169: AttributeError\r\n============================================================================== warnings summary ===============================================================================\r\n\/Users\/thomwolf\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/tensorflow_core\/python\/pywrap_tensorflow_internal.py:15\r\n  \/Users\/thomwolf\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/tensorflow_core\/python\/pywrap_tensorflow_internal.py:15: DeprecationWarning: the imp module is deprecated in favour of importlib; see the module's documentation for alternative uses\r\n    import imp\r\n\r\n-- Docs: https:\/\/docs.pytest.org\/en\/latest\/warnings.html\r\n=========================================================================== short test summary info ===========================================================================\r\nERROR tests\/test_dataset_common.py::DatasetTest::test_builder_class_snli - AttributeError: module 'tests.test_dataset_common' has no attribute 'split'\r\n========================================================================= 1 warning, 1 error in 3.63s =========================================================================\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/59\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/59\/timeline","performed_via_github_app":null}
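The traceback in PR #59 above comes from a name collision: pytest treats any module-level function called `setup_module` as its xunit-style module setup hook and calls it with the test module object, so `file_path` ends up being a module rather than a string and `file_path.split(...)` fails with the `AttributeError` shown. The rename in PR #61 avoids the clash. Below is a minimal reproduction sketch; the file name is hypothetical.

```python
# Hypothetical file tests/test_hook_collision.py: a minimal reproduction of the
# collision behind the traceback above. pytest invokes a module-level
# `setup_module` as its own setup hook and passes it the module object.

def setup_module(file_path):
    # pytest calls this as setup_module(<this test module>), so `file_path`
    # is a module object, not a path string.
    file_path.split("/")  # AttributeError: module ... has no attribute 'split'


def test_anything():
    assert True
```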
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/58","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/58\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/58\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/58\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/58","id":614362308,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE0OTM0NTY4","number":58,"title":"Aborted PR - Fix tests","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-07T21:40:19Z","updated_at":"2020-05-07T21:48:01Z","closed_at":"2020-05-07T21:41:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/58","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/58","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/58.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/58.patch","merged_at":null},"body":"@patrickvonplaten I've broken a bit the tests with #25 while simplifying and re-organizing the `load.py` and `download_manager.py` scripts.\r\n\r\nI'm trying to fix them here but I have a weird error, do you think you can have a look?\r\n```bash\r\n(datasets) MacBook-Pro-de-Thomas:datasets thomwolf$ python -m pytest -sv .\/tests\/test_dataset_common.py::DatasetTest::test_builder_class_snli\r\n============================================================================= test session starts =============================================================================\r\nplatform darwin -- Python 3.7.7, pytest-5.4.1, py-1.8.1, pluggy-0.13.1 -- \/Users\/thomwolf\/miniconda2\/envs\/datasets\/bin\/python\r\ncachedir: .pytest_cache\r\nrootdir: \/Users\/thomwolf\/Documents\/GitHub\/datasets\r\nplugins: xdist-1.31.0, forked-1.1.3\r\ncollected 1 item                                                                                                                                                              \r\n\r\ntests\/test_dataset_common.py::DatasetTest::test_builder_class_snli ERROR\r\n\r\n=================================================================================== ERRORS ====================================================================================\r\n____________________________________________________________ ERROR at setup of 
DatasetTest.test_builder_class_snli ____________________________________________________________\r\n\r\nfile_path = \r\ndownload_config = DownloadConfig(cache_dir=None, force_download=False, resume_download=False, local_files_only=False, proxies=None, user_agent=None, extract_compressed_file=True, force_extract=True)\r\ndownload_kwargs = {}\r\n\r\n    def setup_module(file_path: str, download_config: Optional[DownloadConfig] = None, **download_kwargs,) -> DatasetBuilder:\r\n        r\"\"\"\r\n            Download\/extract\/cache a dataset to add to the lib from a path or url which can be:\r\n                - a path to a local directory containing the dataset processing python script\r\n                - an url to a S3 directory with a dataset processing python script\r\n    \r\n            Dataset codes are cached inside the lib to allow easy import (avoid ugly sys.path tweaks)\r\n            and using cloudpickle (among other things).\r\n    \r\n            Return: tuple of\r\n                the unique id associated to the dataset\r\n                the local path to the dataset\r\n        \"\"\"\r\n        if download_config is None:\r\n            download_config = DownloadConfig(**download_kwargs)\r\n        download_config.extract_compressed_file = True\r\n        download_config.force_extract = True\r\n    \r\n>       name = list(filter(lambda x: x, file_path.split(\"\/\")))[-1] + \".py\"\r\nE       AttributeError: module 'tests.test_dataset_common' has no attribute 'split'\r\n\r\nsrc\/nlp\/load.py:169: AttributeError\r\n============================================================================== warnings summary ===============================================================================\r\n\/Users\/thomwolf\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/tensorflow_core\/python\/pywrap_tensorflow_internal.py:15\r\n  \/Users\/thomwolf\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/tensorflow_core\/python\/pywrap_tensorflow_internal.py:15: DeprecationWarning: the imp module is deprecated in favour of importlib; see the module's documentation for alternative uses\r\n    import imp\r\n\r\n-- Docs: https:\/\/docs.pytest.org\/en\/latest\/warnings.html\r\n=========================================================================== short test summary info ===========================================================================\r\nERROR tests\/test_dataset_common.py::DatasetTest::test_builder_class_snli - AttributeError: module 'tests.test_dataset_common' has no attribute 'split'\r\n========================================================================= 1 warning, 1 error in 3.63s =========================================================================\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/58\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/58\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/57","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/57\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/57\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/57\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/57","id":614261638,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE0ODUzMDM5","number":57,"title":"Better cached path","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-05-07T18:36:00Z","updated_at":"2020-05-08T13:20:30Z","closed_at":"2020-05-08T13:20:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/57","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/57","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/57.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/57.patch","merged_at":"2020-05-08T13:20:28Z"},"body":"### Changes:\r\n- The `cached_path` no longer returns None if the file is missing\/the url doesn't work. Instead, it can raise `FileNotFoundError` (missing file), `ConnectionError` (no cache and unreachable url) or `ValueError` (parsing error)\r\n- Fix requests to firebase API that doesn't handle HEAD requests...\r\n- Allow custom download in datasets script: it allows to use `tf.io.gfile.copy` for example, to download from google storage. I added an example: the `boolq` script","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/57\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/57\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/56","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/56\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/56\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/56\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/56","id":614236869,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE0ODMyODY4","number":56,"title":"[Dataset] Tester add mock function","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-07T17:51:37Z","updated_at":"2020-05-07T17:52:51Z","closed_at":"2020-05-07T17:52:50Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/56","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/56","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/56.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/56.patch","merged_at":"2020-05-07T17:52:50Z"},"body":"need to add an empty `extract()` function to make `hansard` dataset test work.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/56\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/56\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/55","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/55\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/55\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/55\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/55","id":613968072,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE0NjE0MjE1","number":55,"title":"Beam datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-05-07T11:04:32Z","updated_at":"2020-05-11T07:20:02Z","closed_at":"2020-05-11T07:20:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/55","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/55","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/55.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/55.patch","merged_at":"2020-05-11T07:20:00Z"},"body":"# Beam datasets\r\n\r\n## Intro\r\n\r\nBeam Datasets are using beam pipelines for preprocessing (basically lots of `.map` over objects called PCollections).\r\nThe advantage of apache beam is that you can choose which type of runner you want to use to preprocess your data. The main runners are:\r\n- the `DirectRunner` to run the pipeline locally (default). However I encountered memory issues for big datasets (like the french or english wikipedia). Small dataset work fine\r\n- Google Dataflow. I didn't play with it.\r\n- Spark or Flink, two well known data processing frameworks. I tried to use the Spark\/Flink local runners provided by apache beam for python and wasn't able to make them work properly though...\r\n\r\n## From tfds beam datasets to our own beam datasets\r\n\r\nTensorflow datasets used beam and a complicated pipeline to shard the TFRecords files.\r\nTo allow users to download beam datasets and not having to preprocess them, they also allow to download the already preprocessed datasets from their google storage (the beam pipeline doesn't run in that case).\r\n\r\nOn our side, we replace TFRecords by something else. 
Arrow or Parquet do the job but I chose Parquet as: 1) there is a builtin apache beam parquet writer that is quite convenient, and 2) reading parquet from the pyarrow library is also simple and effective (there is a mmap option !)\r\n\r\nMoreover we don't shard datasets in many many files like tfds (they were doing probably doing that mainly because of the limit of 2Gb per TFRecord file). Therefore we have a simpler pipeline that saves each split into one parquet file. We also removed the utilities to use their google storage (for now maybe ? we'll have to discuss it).\r\n\r\n## Main changes\r\n\r\n- Added a BeamWriter to save the output of beam pipelines into parquet files and fill dataset infos\r\n- Create a ParquetReader and refactor a bit the arrow_reader.py\r\n\r\n\\> **With this, we can now try to add beam datasets from tfds**\r\n\r\nI already added the wikipedia one, and I will also try to add the Wiki40b dataset\r\n\r\n## Test the wikipedia script\r\n\r\nYou can download and run the beam pipeline for wikipedia (using the `DirectRunner` by default) like this:\r\n\r\n```\r\n>>> import nlp\r\n>>> nlp.load(\"datasets\/nlp\/wikipedia\", dataset_config=\"20200501.frr\")\r\n```\r\n\r\nThis wikipedia dataset (lang: frr, North Frisian) is a small one (~10Mb), but feel free to try bigger ones (and fill 20Gb of swap memory if you try the english one lol)\r\n\r\n## Next\r\n\r\nShould we allow to download preprocessed datasets from the tfds google storage ?\r\nShould we try to optimize the beam pipelines to run locally without memory issues ?\r\nShould we try other data processing frameworks for big datasets, like spark ?\r\n\r\n\r\n## About this PR\r\n\r\nIt should be merged after #25 \r\n\r\n-----------------\r\n\r\nI'd be happy to have your feedback and your ideas to improve the processing of big datasets like wikipedia :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/55\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/55\/timeline","performed_via_github_app":null}
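PR #55 above saves each split of a Beam dataset to a single Parquet file and notes that pyarrow can read Parquet with a memory-map option. A minimal sketch of that read path follows; the file name is a placeholder assumption, not a path produced by the library.

```python
# Minimal sketch of reading one Parquet split with pyarrow's memory-map option,
# as mentioned in PR #55. The file name is a placeholder assumption.
import pyarrow.parquet as pq

table = pq.read_table("wikipedia-20200501.frr-train.parquet", memory_map=True)
print(table.num_rows)
print(table.schema)
```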
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/54","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/54\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/54\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/54\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/54","id":613513348,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE0MjUyODkw","number":54,"title":"[Tests] Improved Error message for dummy folder structure","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-06T18:11:48Z","updated_at":"2020-05-06T18:13:00Z","closed_at":"2020-05-06T18:12:59Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/54","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/54","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/54.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/54.patch","merged_at":"2020-05-06T18:12:59Z"},"body":"Improved Error message","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/54\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/54\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/53","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/53\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/53\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/53\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/53","id":613436158,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE0MTkwMzkz","number":53,"title":"[Features] Typo in generate_from_dict","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-06T16:05:23Z","updated_at":"2020-05-07T15:28:46Z","closed_at":"2020-05-07T15:28:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/53","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/53","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/53.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/53.patch","merged_at":"2020-05-07T15:28:45Z"},"body":"Change `isinstance` test in features when generating features from dict.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/53\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/53\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/52","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/52\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/52\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/52\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/52","id":613339071,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE0MTEyMDAy","number":52,"title":"allow dummy folder structure to handle dict of lists","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-06T13:54:35Z","updated_at":"2020-05-06T13:55:19Z","closed_at":"2020-05-06T13:55:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/52","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/52","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/52.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/52.patch","merged_at":"2020-05-06T13:55:18Z"},"body":"`esnli.py` needs that extension of the dummy data testing.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/52\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/52\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/51","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/51\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/51\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/51\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/51","id":613266668,"node_id":"MDExOlB1bGxSZXF1ZXN0NDE0MDUyOTYw","number":51,"title":"[Testing] Improved testing structure","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-06T12:03:07Z","updated_at":"2020-05-07T22:07:19Z","closed_at":"2020-05-06T13:20:18Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/51","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/51","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/51.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/51.patch","merged_at":"2020-05-06T13:20:17Z"},"body":"This PR refactors the test design a bit and puts the mock download manager in the `utils` files as it is just a test helper class.\r\n\r\nas @mariamabarham pointed out, creating a dummy folder structure can be quite hard to grasp.\r\nThis PR tries to change that to some extent.\r\n\r\nIt follows the following logic for the `dummy` folder structure now:\r\n1.) The data bulider has no config -> the  `dummy` folder structure is:\r\n`dummy\/\/dummy_data.zip`\r\n2) The data builder has >= 1 configs -> the `dummy` folder structure is: \r\n`dummy\/\/\/dummy_data.zip`\r\n`dummy\/\/\/dummy_data.zip`\r\n\r\nNow, the difficult part is how to create the `dummy_data.zip` file. There are two cases:\r\nA) The `data_urs` parameter inserted into the `download_and_extract` fn is a **string**:\r\n-> the `dummy_data.zip` file zips the folder: \r\n`dummy_data\/`\r\nB) The `data_urs` parameter inserted into the `download_and_extract` fn is a **dict**:\r\n-> the `dummy_data.zip` file zips the folder: \r\n`dummy_data\/`\r\n`dummy_data\/`\r\n\r\nBy relative folder structure I mean `url_path.split('.\/')[-1]`. 
As an example the dataset **xquad** by deepmind has the following url path behind the key `de`: `https:\/\/github.com\/deepmind\/xquad\/blob\/master\/xquad.de.json` \r\n-> This means that the relative url path should be `xquad.de.json`.\r\n\r\n\r\n@mariamabarham B) is a change from how it was before and I think it makes more sense. \r\nWhile before the `dummy_data.zip` file for xquad with config `de` looked like:\r\n`dummy_data\/de`, it would now look like `dummy_data\/xquad.de.json`. I think this is better and easier to understand. \r\n\r\nTherefore there are currently 6 tests whose dummy folder structure would have to be changed, but this can easily be done (30min). \r\n\r\nI also added a function: `print_dummy_data_folder_structure` that prints out the expected structures when testing, which should be quite helpful.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/51\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/51\/timeline","performed_via_github_app":null}
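The PR body above (#51) derives the expected dummy-data file name from the download URL when the URLs are passed as a dict. Below is a minimal sketch of that rule; the helper name `expected_dummy_path` is hypothetical and not taken from the repository:

```python
import os

def expected_dummy_path(url_path: str) -> str:
    """Path a dummy file is expected to have inside the dummy_data folder.

    Mirrors the rule described above: keep everything after './' for relative
    paths; for a plain URL such as the xquad one this boils down to the
    trailing file name.
    """
    if "./" in url_path:
        relative = url_path.split("./")[-1]
    else:
        relative = os.path.basename(url_path)
    return os.path.join("dummy_data", relative)

# Example with the xquad URL quoted in the PR description:
print(expected_dummy_path("https://github.com/deepmind/xquad/blob/master/xquad.de.json"))
# -> dummy_data/xquad.de.json
```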
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/50","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/50\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/50\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/50\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/50","id":612583126,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEzNTAwMjE0","number":50,"title":"[Tests] test only for fast test as a default","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-05T12:59:22Z","updated_at":"2020-05-05T13:02:18Z","closed_at":"2020-05-05T13:02:16Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/50","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/50","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/50.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/50.patch","merged_at":"2020-05-05T13:02:16Z"},"body":"Test only for one config on circle ci to speed up testing. Add all config test as a slow test. \r\n@mariamabarham @thomwolf ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/50\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/50\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/49","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/49\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/49\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/49\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/49","id":612545483,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEzNDY5ODg0","number":49,"title":"fix flatten nested","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-05T11:55:13Z","updated_at":"2020-05-05T13:59:26Z","closed_at":"2020-05-05T13:59:25Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/49","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/49","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/49.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/49.patch","merged_at":"2020-05-05T13:59:25Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/49\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/49\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/48","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/48\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/48\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/48\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/48","id":612504687,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEzNDM2MTgz","number":48,"title":"[Command Convert] remove tensorflow import","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-05T10:41:00Z","updated_at":"2020-05-05T11:13:58Z","closed_at":"2020-05-05T11:13:56Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/48","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/48","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/48.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/48.patch","merged_at":"2020-05-05T11:13:56Z"},"body":"Remove all tensorflow import statements.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/48\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/48\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/47","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/47\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/47\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/47\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/47","id":612446493,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEzMzg5MDc1","number":47,"title":"[PyArrow Feature] fix py arrow bool","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-05T08:56:28Z","updated_at":"2020-05-05T10:40:28Z","closed_at":"2020-05-05T10:40:27Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/47","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/47","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/47.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/47.patch","merged_at":"2020-05-05T10:40:27Z"},"body":"To me it seems that `bool` can only be accessed with `bool_` when looking at the pyarrow types: https:\/\/arrow.apache.org\/docs\/python\/api\/datatypes.html. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/47\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/47\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/46","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/46\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/46\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/46\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/46","id":612398190,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEzMzUxNTY0","number":46,"title":"[Features] Strip str key before dict look-up","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-05T07:31:45Z","updated_at":"2020-05-05T08:37:45Z","closed_at":"2020-05-05T08:37:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/46","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/46","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/46.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/46.patch","merged_at":"2020-05-05T08:37:44Z"},"body":"The dataset `anli.py` currently fails because it tries to look up a key `1\\n` in a dict that only has the key `1`. Added an if statement to strip key if it cannot be found in dict.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/46\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/46\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/45","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/45\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/45\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/45\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/45","id":612386583,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEzMzQzMjAy","number":45,"title":"[Load] Separate Module kwargs and builder kwargs.","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-05T07:09:54Z","updated_at":"2020-05-08T09:51:22Z","closed_at":"2020-05-08T09:51:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/45","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/45","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/45.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/45.patch","merged_at":null},"body":"Kwargs for the `load_module` fn should be passed with `module_xxxx` to `builder_kwargs` of `load` fn.\r\n\r\nThis is a follow-up PR of: https:\/\/github.com\/huggingface\/nlp\/pull\/41","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/45\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/45\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/44","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/44\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/44\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/44\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/44","id":611873486,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyOTUwMzU1","number":44,"title":"[Tests] Fix tests for datasets with no config","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-04T13:25:38Z","updated_at":"2020-05-04T13:28:04Z","closed_at":"2020-05-04T13:28:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/44","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/44","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/44.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/44.patch","merged_at":"2020-05-04T13:28:03Z"},"body":"Forgot to fix `None` problem for datasets that have no config this in PR: https:\/\/github.com\/huggingface\/nlp\/pull\/42","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/44\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/44\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/43","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/43\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/43\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/43\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/43","id":611773279,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyODcxNTE5","number":43,"title":"[Checksums] If no configs exist prevent to run over empty list","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-04T10:39:42Z","updated_at":"2020-05-04T13:18:03Z","closed_at":"2020-05-04T13:18:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/43","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/43","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/43.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/43.patch","merged_at":null},"body":"`movie_rationales` e.g. has no configs.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/43\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/43\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/42","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/42\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/42\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/42\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/42","id":611754343,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyODU1OTE2","number":42,"title":"[Tests] allow tests for builders without config","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-04T10:06:22Z","updated_at":"2020-05-04T13:10:50Z","closed_at":"2020-05-04T13:10:48Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/42","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/42","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/42.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/42.patch","merged_at":"2020-05-04T13:10:48Z"},"body":"Some dataset scripts have no configs - the tests have to be adapted for this case. \r\nIn this case the dummy data will be saved as:\r\n- natural_questions\r\n  -> dummy\r\n  -> -> 1.0.0 (version num)\r\n  -> -> -> dummy_data.zip\r\n   ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/42\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/42\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/41","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/41\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/41\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/41\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/41","id":611739219,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyODQzNDQy","number":41,"title":"[Load module] allow kwargs into load module","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-04T09:42:11Z","updated_at":"2020-05-04T19:39:07Z","closed_at":"2020-05-04T19:39:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/41","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/41","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/41.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/41.patch","merged_at":"2020-05-04T19:39:06Z"},"body":"Currenly it is not possible to force a re-download of the dataset script. \r\n\r\nThis simple change allows to pass ``force_reload=True`` as ``builder_kwargs`` in the ``load.py`` function.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/41\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/41\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/40","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/40\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/40\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/40\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/40","id":611721308,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyODI4NzU2","number":40,"title":"Update remote checksums instead of overwrite","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-04T09:13:14Z","updated_at":"2020-05-04T11:51:51Z","closed_at":"2020-05-04T11:51:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/40","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/40","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/40.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/40.patch","merged_at":"2020-05-04T11:51:49Z"},"body":"When the user uploads a dataset on S3, checksums are also uploaded with the `--upload_checksums` parameter.\r\n\r\nIf the user uploads the dataset in several steps, then the remote checksums file was previously overwritten. Now it's going to be updated with the new checksums.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/40\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/40\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/39","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/39\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/39\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/39\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/39","id":611712135,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyODIxNTA4","number":39,"title":"[Test] improve slow testing","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-04T08:58:33Z","updated_at":"2020-05-04T08:59:50Z","closed_at":"2020-05-04T08:59:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/39","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/39","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/39.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/39.patch","merged_at":"2020-05-04T08:59:49Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/39\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/39\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/38","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/38\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/38\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/38\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/38","id":611677656,"node_id":"MDU6SXNzdWU2MTE2Nzc2NTY=","number":38,"title":"[Checksums] Error for some datasets","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/a
pi.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2020-05-04T08:00:16Z","updated_at":"2020-05-04T09:48:20Z","closed_at":"2020-05-04T09:48:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The checksums command works very nicely for `squad`. But for `crime_and_punish` and `xnli`, \r\nthe same bug happens:\r\n\r\nWhen running: \r\n```\r\npython nlp-cli nlp-cli test xnli --save_checksums\r\n```\r\n\r\nleads to:\r\n\r\n```\r\n  File \"nlp-cli\", line 33, in \r\n    service.run()\r\n  File \"\/home\/patrick\/python_bin\/nlp\/commands\/test.py\", line 61, in run\r\n    ignore_checksums=self._ignore_checksums,\r\n  File \"\/home\/patrick\/python_bin\/nlp\/builder.py\", line 383, in download_and_prepare\r\n    self._download_and_prepare(dl_manager=dl_manager, download_config=download_config)\r\n  File \"\/home\/patrick\/python_bin\/nlp\/builder.py\", line 627, in _download_and_prepare\r\n    dl_manager=dl_manager, max_examples_per_split=download_config.max_examples_per_split,\r\n  File \"\/home\/patrick\/python_bin\/nlp\/builder.py\", line 431, in _download_and_prepare\r\n    split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n  File \"\/home\/patrick\/python_bin\/nlp\/datasets\/xnli\/8bf4185a2da1ef2a523186dd660d9adcf0946189e7fa5942ea31c63c07b68a7f\/xnli.py\", line 95, in _split_generators\r\n    dl_dir = dl_manager.download_and_extract(_DATA_URL)\r\n  File \"\/home\/patrick\/python_bin\/nlp\/utils\/download_manager.py\", line 246, in download_and_extract\r\n    return self.extract(self.download(url_or_urls))\r\n  File \"\/home\/patrick\/python_bin\/nlp\/utils\/download_manager.py\", line 186, in download\r\n    self._record_sizes_checksums(url_or_urls, downloaded_path_or_paths)\r\n  File \"\/home\/patrick\/python_bin\/nlp\/utils\/download_manager.py\", line 166, in _record_sizes_checksums\r\n    self._recorded_sizes_checksums[url] = get_size_checksum(path)\r\n  File \"\/home\/patrick\/python_bin\/nlp\/utils\/checksums_utils.py\", line 81, in get_size_checksum\r\n    with open(path, \"rb\") as f:\r\nTypeError: expected str, bytes or os.PathLike object, not 
tuple\r\n```\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/38\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/38\/timeline","performed_via_github_app":null}
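The traceback in issue #38 above ends in `get_size_checksum` calling `open()` on something that is not a single path: for datasets like xnli the download manager hands back a tuple or nested structure of paths. The sketch below reproduces the failure mode and shows the obvious fix direction (flatten to plain string paths before hashing); the names are illustrative, not the actual `nlp` utilities:

```python
import hashlib
import os

def get_size_checksum(path: str):
    """Size and sha256 of one local file (the call that fails when given a tuple)."""
    sha256 = hashlib.sha256()
    with open(path, "rb") as f:  # TypeError if `path` is a tuple instead of a str
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha256.update(chunk)
    return os.path.getsize(path), sha256.hexdigest()

def iter_paths(path_or_paths):
    """Yield plain string paths from arbitrarily nested tuples/lists/dicts."""
    if isinstance(path_or_paths, str):
        yield path_or_paths
    elif isinstance(path_or_paths, dict):
        for value in path_or_paths.values():
            yield from iter_paths(value)
    else:
        for value in path_or_paths:
            yield from iter_paths(value)

# Record sizes/checksums per flattened path instead of per raw download result:
# for p in iter_paths(downloaded_path_or_paths): get_size_checksum(p)
```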
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/37","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/37\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/37\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/37\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/37","id":611670295,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyNzg5MjQ4","number":37,"title":"[Datasets ToDo-List] add datasets","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\
/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":8,"created_at":"2020-05-04T07:47:39Z","updated_at":"2020-05-08T13:48:23Z","closed_at":"2020-05-08T13:48:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":true,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/37","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/37","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/37.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/37.patch","merged_at":null},"body":"## Description\r\n\r\nThis PR acts as a dashboard to see which datasets are added to the library and work. \r\n\r\nCicle-ci should always be green so that we can be sure that newly added datasets are functional. \r\nThis PR should not be merged.\r\n\r\n\r\n## Progress\r\n\r\n**For the following datasets the test commands**:\r\n```\r\nRUN_SLOW=1 pytest tests\/test_dataset_common.py::DatasetTest::test_load_real_dataset_\r\n```\r\nand \r\n```\r\nRUN_SLOW=1 pytest tests\/test_dataset_common.py::DatasetTest::test_load_dataset_all_configs_\r\n```\r\n\r\n**passes**.\r\n\r\n- [x] Squad\r\n- [x] Sentiment140\r\n- [x] XNLI\r\n- [x] Crime_and_Punish\r\n- [x] movie_rationales\r\n- [x] ai2_arc\r\n- [x] anli\r\n- [x] event2Mind\r\n- [x] Fquad\r\n- [x] blimp\r\n- [x] empathetic_dialogues\r\n- [x] cosmos_qa\r\n- [x] xquad\r\n- [x] blog_authorship_corpus\r\n- [x] SNLI\r\n- [x] break_data\r\n- [x] SQuAD v2\r\n- [x] cfq\r\n- [x] eraser_multi_rc\r\n- [x] Glue\r\n- [x] Tydiqa\r\n- [x] wiki_qa\r\n- [x] wikitext\r\n- [x] winogrande\r\n- [x] wiqa\r\n- [x] esnli\r\n- [x] civil_comments\r\n- [x] commonsense_qa\r\n- [x] com_qa\r\n- [x] coqa\r\n- [x] wiki_split\r\n- [x] cos_e\r\n- [x] xcopa\r\n- [x] quarel\r\n- [x] quartz\r\n- [x] squad_it\r\n- [x] quoref \r\n- [x] squad_pt\r\n- [x] cornell_movie_dialog\r\n- [x] SciQ\r\n- [x] Scifact\r\n- [x] hellaswag\r\n- [x] ted_multi (in translate)\r\n- [x] Aeslc (summarization)\r\n- [x] drop\r\n- [x] gap\r\n- [x] hansard\r\n- [x] opinosis\r\n- [x] MLQA\r\n- [x] math_dataset\r\n\r\n## How-To-Add a dataset\r\n\r\n**Before adding a dataset make sure that your branch is up to date**:\r\n1. `git checkout add_datasets`\r\n2. 
`git pull`\r\n\r\n**Add a dataset via the `convert_dataset.sh` bash script:**  \r\n\r\nRunning `bash convert_dataset.sh ` (*e.g.* `bash convert_dataset.sh ..\/tensorflow-datasets\/tensorflow_datasets\/text\/movie_rationales.py`) will automatically run all the steps mentioned in **Add a dataset manually** below. \r\n\r\nMake sure that you run `convert_dataset.sh` from the root folder of `nlp`.\r\n\r\nThe conversion script should work almost always for step 1): \"convert dataset script from tfds to nlp format\" and 2) \"create checksum file\" and step 3) \"make style\".\r\n\r\nIt can also sometimes automatically run step 4) \"create the correct dummy data from tfds\", but this will only work if a) there is either no config name or only one config name and b) the `tfds testing\/test_data\/fake_example` is in the correct form.\r\n\r\nNevertheless, the script should always be run in the beginning until an error occurs to be more efficient. \r\n\r\nIf the conversion script does not work or fails at some step, then you can run the steps manually as follows:\r\n\r\n**Add a dataset manually** \r\n\r\nMake sure you run all of the following commands from the root of your `nlp` git clone.\r\nAlso make sure that you changed to this branch:\r\n```\r\ngit checkout add_datasets\r\n```\r\n\r\n1) the tfds datascript file should be converted to `nlp` style:\r\n\r\n```\r\npython nlp-cli convert --tfds_path .py --nlp_directory datasets\/nlp\r\n```\r\n\r\nThis will convert the tdfs script and create a folder with the correct name.\r\n\r\n2) the checksum file should be added. Use the command:\r\n```\r\npython nlp-cli test datasets\/nlp\/ --save_checksums --all_configs\r\n```\r\n\r\nA checksums.txt file should be created in your folder and the structure should look as follows:\r\n\r\nsquad\/\r\n\u251c\u2500\u2500 squad.py\/\r\n\u2514\u2500\u2500 urls_checksums\/\r\n...........\u2514\u2500\u2500 checksums.txt\r\n\r\nDelete the created `*.lock` file afterward - it should not be uploaded to AWS.\r\n\r\n3) run black and isort on your newly added datascript files so that they look nice:\r\n\r\n```\r\nmake style\r\n```\r\n\r\n4) the dummy data should be added. 
For this it might be useful to take a look into the structure of other examples as shown in the PR here and at `` whether the same  data can be used.\r\n\r\n5)  the data can be uploaded to AWS using the command\r\n```\r\naws s3 cp datasets\/nlp\/ s3:\/\/datasets.huggingface.co\/nlp\/ --recursive\r\n```\r\n\r\n6) check whether all works as expected using: \r\n```\r\nRUN_SLOW=1 pytest tests\/test_dataset_common.py::DatasetTest::test_load_real_dataset_\r\n```\r\nand \r\n```\r\nRUN_SLOW=1 pytest tests\/test_dataset_common.py::DatasetTest::test_load_dataset_all_configs_\r\n```\r\n\r\n7) push to this PR and rerun the circle ci workflow to check whether circle ci stays green.\r\n\r\n8) Edit this commend and tick off your newly added dataset :-) \r\n\r\n## TODO-list\r\n\r\nMaybe we can add a TODO-list here for everybody that feels like adding new datasets so that we will not add the same datasets.\r\n\r\nHere a link to available datasets: https:\/\/docs.google.com\/spreadsheets\/d\/1zOtEqOrnVQwdgkC4nJrTY6d-Av02u0XFzeKAtBM2fUI\/edit#gid=0\r\n\r\nPatrick:\r\n\r\n- [ ] boolq - *weird download link*\r\n- [ ] c4 - *beam dataset*","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/37\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/37\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/36","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/36\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/36\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/36\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/36","id":611528349,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyNjgwOTk1","number":36,"title":"Metrics - refactoring, adding support for download and distributed metrics","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-05-03T23:00:17Z","updated_at":"2020-05-11T08:16:02Z","closed_at":"2020-05-11T08:16:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/36","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/36","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/36.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/36.patch","merged_at":"2020-05-11T08:16:00Z"},"body":"Refactoring metrics to have a similar loading API than the datasets and improving the import system.\r\n\r\n# Import system\r\nThe import system has ben upgraded. There are now three types of imports allowed:\r\n1. `library` imports (identified as \"absolute imports\")\r\n```python\r\nimport seqeval\r\n```\r\n=> we'll test all the imports before running the scripts and if one cannot be imported we'll display an error message like this one:\r\n`ImportError: To be able to use this metric\/dataset, you need to install the following dependencies ['seqeval'] using 'pip install seqeval' for instance'`\r\n\r\n2. `internal` imports (identified as \"relative imports\")\r\n```python\r\nimport .c4_utils\r\n```\r\n=> we'll assume this point to a file in the same directory\/S3-directory as the main script and download this file.\r\n\r\n2. 
`external` imports (identified as \"relative imports\" with a comment starting with `# From:`)\r\n```python\r\nfrom .nmt_bleu import compute_bleu  # From: https:\/\/github.com\/tensorflow\/nmt\/blob\/master\/nmt\/scripts\/bleu.py\r\n```\r\n=> we'll assume this point to the URL of a python script (if it's a link to a github file, we'll take the raw file automatically).\r\n=> the script is downloaded and renamed to the import name (here above renamed from `bleu.py` to `nmt_bleu.py`). Renaming the file can be necessary if the distant file has the same name as the dataset\/metric processing script. If you forgot to rename the distant script and it has the same name as the dataset\/metric, you'll have an explicit error message asking to rename the import anyway.\r\n\r\n# Hosting metrics\r\n\r\nMetrics are hosted on a S3 bucket like the dataset processing scripts.\r\n\r\n# Metrics scripts\r\n\r\nMetrics scripts have a lot in common with datasets processing scripts. They also have a `metric.info` including citations, descriptions and links to relevant pages.\r\n\r\nMetrics have more documentation to supply to ensure they are used well.\r\n\r\nFour examples are already included for reference in [.\/metrics](.\/metrics): BLEU, ROUGE, SacreBLEU and SeqEVAL.\r\n\r\n# Automatic support for distributed\/multi-processing metric computation\r\n\r\nWe've also added support for automatic distributed\/multi-processing metric computation (e.g. when using DistributedDataParallel). We leverage our own dataset format for smart caching in this case. \r\n\r\nHere is a quick gist of a standard use of metrics (the simplest usage):\r\n```python\r\nimport nlp\r\nbleu_metric = nlp.load_metric('bleu')\r\n\r\n# If you only have a single iteration, you can easily compute the score like this\r\npredictions = model(inputs)\r\nscore = bleu_metric.compute(predictions, references)\r\n\r\n# If you have a loop, you can \"add\" your predictions and references at each iteration instead of having to save them yourself (the metric object store them efficiently for you)\r\nfor batch in dataloader:\r\n    model_input, targets = batch\r\n    predictions = model(model_inputs)\r\n    bleu.add(predictions, targets)\r\nscore = bleu_metric.compute()  # Compute the score from all the stored predictions\/references\r\n```\r\n\r\nHere is a quick gist of a use in a distributed torch setup (should work for any python multi-process setup actually). It's pretty much identical to the second example above:\r\n```python\r\nimport nlp\r\n# You need to give the total number of parallel python processes (num_process) and the id of each process (process_id)\r\nbleu = nlp.load_metric('bleu', process_id=torch.distributed.get_rank(),b num_process=torch.distributed.get_world_size())\r\n\r\nfor batch in dataloader:\r\n    model_input, targets = batch\r\n    predictions = model(model_inputs)\r\n    bleu.add(predictions, targets)\r\nscore = bleu_metric.compute()  # Compute the score on the first node by default (can be set to compute on each node as well)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/36\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/36\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/35","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/35\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/35\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/35\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/35","id":611413731,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyNjAyMTc0","number":35,"title":"[Tests] fix typo","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-03T13:23:49Z","updated_at":"2020-05-03T13:24:21Z","closed_at":"2020-05-03T13:24:20Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/35","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/35","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/35.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/35.patch","merged_at":"2020-05-03T13:24:20Z"},"body":"@lhoestq - currently the slow test fail with:\r\n\r\n```\r\n_____________________________________________________________________________________ DatasetTest.test_load_real_dataset_xnli _____________________________________________________________________________________\r\n                                                                     \r\nself = , dataset_name = 'xnli'\r\n                                   \r\n    @slow                                                                                               \r\n    def test_load_real_dataset(self, dataset_name):\r\n        with tempfile.TemporaryDirectory() as temp_data_dir:                                                                                                                                                      \r\n>           dataset = load(dataset_name, data_dir=temp_data_dir)\r\n                                                                                                                                                                                                                   \r\ntests\/test_dataset_common.py:153:                                                                                                                                             
                                     \r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\r\n..\/..\/python_bin\/nlp\/load.py:497: in load                                                                     \r\n    dbuilder.download_and_prepare(**download_and_prepare_kwargs)\r\n..\/..\/python_bin\/nlp\/builder.py:383: in download_and_prepare\r\n    self._download_and_prepare(dl_manager=dl_manager, download_config=download_config)\r\n..\/..\/python_bin\/nlp\/builder.py:627: in _download_and_prepare\r\n    dl_manager=dl_manager, max_examples_per_split=download_config.max_examples_per_split,\r\n..\/..\/python_bin\/nlp\/builder.py:431: in _download_and_prepare\r\n    split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n..\/..\/python_bin\/nlp\/datasets\/xnli\/8bf4185a2da1ef2a523186dd660d9adcf0946189e7fa5942ea31c63c07b68a7f\/xnli.py:95: in _split_generators                                                                               \r\n    dl_dir = dl_manager.download_and_extract(_DATA_URL)\r\n..\/..\/python_bin\/nlp\/utils\/download_manager.py:246: in download_and_extract\r\n    return self.extract(self.download(url_or_urls))         \r\n..\/..\/python_bin\/nlp\/utils\/download_manager.py:186: in download                       \r\n    self._record_sizes_checksums(url_or_urls, downloaded_path_or_paths)\r\n..\/..\/python_bin\/nlp\/utils\/download_manager.py:166: in _record_sizes_checksums           \r\n    self._recorded_sizes_checksums[url] = get_size_checksum(path)\r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\r\n                                                                              \r\npath = ('', '\/tmp\/tmpkajlg9yc\/downloads\/c0f7773c480a3f2d85639d777e0e17e65527460310d80760fd3fc2b2f2960556.c952a63cb17d3d46e412ceb7dbcd656ce2b15cc9ef17f50c28f81c48a7c853b5')\r\n                                                                                                                                                                                                                   \r\n    def get_size_checksum(path: str) -> Tuple[int, str]:\r\n        \"\"\"Compute the file size and the sha256 checksum of a file\"\"\"                                                                                            \r\n        m = sha256()\r\n>       with open(path, \"rb\") as f:                       \r\nE       TypeError: expected str, bytes or os.PathLike object, not tuple\r\n                                              \r\n..\/..\/python_bin\/nlp\/utils\/checksums_utils.py:81: TypeError     \r\n```\r\n\r\n- the checksums probably need to be updated no? And we should also think about how to write a test for the checksums.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/35\/reactions","total_count":1,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":1},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/35\/timeline","performed_via_github_app":null}
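For context on the traceback in PR #35 above: `get_size_checksum` expects a plain file path but is handed a `(url, path)`-style tuple by its caller, hence the `TypeError`. A minimal sketch of such a helper (streaming the file in chunks; not the repository's exact code) looks like this:

```python
from hashlib import sha256
from typing import Tuple

def get_size_checksum(path: str) -> Tuple[int, str]:
    """Compute the file size and the sha256 checksum of a file (sketch only)."""
    m = sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
            size += len(chunk)
            m.update(chunk)
    return size, m.hexdigest()
```

The error goes away once the caller passes only the local path, for instance the second element of the tuple shown in the traceback.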
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/34","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/34\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/34\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/34\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/34","id":611385516,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyNTg0OTM0","number":34,"title":"[Tests] add slow tests","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-03T11:01:22Z","updated_at":"2020-05-03T12:18:30Z","closed_at":"2020-05-03T12:18:29Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/34","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/34","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/34.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/34.patch","merged_at":"2020-05-03T12:18:29Z"},"body":"This PR adds a slow test that downloads the \"real\" dataset. The test is decorated as \"slow\" so that it will not automatically run on circle ci.\r\n\r\nBefore uploading a dataset, one should test that this test passes, manually by running \r\n\r\n```\r\nRUN_SLOW=1 pytest tests\/test_dataset_common.py::DatasetTest::test_load_real_dataset_\r\n```\r\n\r\nThis PR should be merged after PR: #33 ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/34\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/34\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/33","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/33\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/33\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/33\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/33","id":611052081,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyMzU1ODE0","number":33,"title":"Big cleanup\/refactoring for clean serialization","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-05-01T23:45:57Z","updated_at":"2020-05-03T12:17:34Z","closed_at":"2020-05-03T12:17:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/33","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/33","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/33.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/33.patch","merged_at":"2020-05-03T12:17:33Z"},"body":"This PR cleans many base classes to re-build them as `dataclasses`. 
We can thus use a simple serialization workflow for `DatasetInfo`, including it's `Features` and `SplitDict` based on `dataclasses` `asdict()`.\r\n\r\nThe resulting code is a lot shorter, can be easily serialized\/deserialized, dataset info are human-readable and we can get rid of the `dataclass_json` dependency.\r\n\r\nThe scripts have breaking changes and the conversion tool is updated.\r\n\r\nExample of dataset info in SQuAD script now:\r\n```python\r\n  def _info(self):\r\n    return nlp.DatasetInfo(\r\n        description=_DESCRIPTION,\r\n        features=nlp.Features({\r\n            \"id\":\r\n                nlp.Value('string'),\r\n            \"title\":\r\n                nlp.Value('string'),\r\n            \"context\":\r\n                nlp.Value('string'),\r\n            \"question\":\r\n                nlp.Value('string'),\r\n            \"answers\":\r\n                nlp.Sequence({\r\n                    \"text\": nlp.Value('string'),\r\n                    \"answer_start\": nlp.Value('int32'),\r\n                }),\r\n        }),\r\n        # No default supervised_keys (as we have to pass both question\r\n        # and context as input).\r\n        supervised_keys=None,\r\n        homepage=\"https:\/\/rajpurkar.github.io\/SQuAD-explorer\/\",\r\n        citation=_CITATION,\r\n    )\r\n```\r\n\r\nExample of serialized dataset info:\r\n```bash\r\n{\r\n    \"description\": \"Stanford Question Answering Dataset (SQuAD) is a reading comprehension dataset, consisting of questions posed by crowdworkers on a set of Wikipedia articles, where the answer to every question is a segment of text, or span, from the corresponding reading passage, or the question might be unanswerable.\\n\",\r\n    \"citation\": \"@article{2016arXiv160605250R,\\n             author = {{Rajpurkar}, Pranav and {Zhang}, Jian and {Lopyrev},\\n                                 Konstantin and {Liang}, Percy},\\n                title = \\\"{SQuAD: 100,000+ Questions for Machine Comprehension of Text}\\\",\\n            journal = {arXiv e-prints},\\n                 year = 2016,\\n                    eid = {arXiv:1606.05250},\\n                pages = {arXiv:1606.05250},\\narchivePrefix = {arXiv},\\n             eprint = {1606.05250},\\n}\\n\",\r\n    \"homepage\": \"https:\/\/rajpurkar.github.io\/SQuAD-explorer\/\",\r\n    \"license\": \"\",\r\n    \"features\": {\r\n        \"id\": {\r\n            \"dtype\": \"string\",\r\n            \"_type\": \"Value\"\r\n        },\r\n        \"title\": {\r\n            \"dtype\": \"string\",\r\n            \"_type\": \"Value\"\r\n        },\r\n        \"context\": {\r\n            \"dtype\": \"string\",\r\n            \"_type\": \"Value\"\r\n        },\r\n        \"question\": {\r\n            \"dtype\": \"string\",\r\n            \"_type\": \"Value\"\r\n        },\r\n        \"answers\": {\r\n            \"feature\": {\r\n                \"text\": {\r\n                    \"dtype\": \"string\",\r\n                    \"_type\": \"Value\"\r\n                },\r\n                \"answer_start\": {\r\n                    \"dtype\": \"int32\",\r\n                    \"_type\": \"Value\"\r\n                }\r\n            },\r\n            \"length\": -1,\r\n            \"_type\": \"Sequence\"\r\n        }\r\n    },\r\n    \"supervised_keys\": null,\r\n    \"name\": \"squad\",\r\n    \"version\": {\r\n        \"version_str\": \"1.0.0\",\r\n        \"description\": \"New split API (https:\/\/tensorflow.org\/datasets\/splits)\",\r\n        \"nlp_version_to_prepare\": null,\r\n  
      \"major\": 1,\r\n        \"minor\": 0,\r\n        \"patch\": 0\r\n    },\r\n    \"splits\": {\r\n        \"train\": {\r\n            \"name\": \"train\",\r\n            \"num_bytes\": 79426386,\r\n            \"num_examples\": 87599,\r\n            \"dataset_name\": \"squad\"\r\n        },\r\n        \"validation\": {\r\n            \"name\": \"validation\",\r\n            \"num_bytes\": 10491883,\r\n            \"num_examples\": 10570,\r\n            \"dataset_name\": \"squad\"\r\n        }\r\n    },\r\n    \"size_in_bytes\": 0,\r\n    \"download_size\": 35142551,\r\n    \"download_checksums\": []\r\n}\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/33\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/33\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/32","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/32\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/32\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/32\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/32","id":610715580,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyMTAzMzIx","number":32,"title":"Fix map caching notebooks","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-01T11:55:26Z","updated_at":"2020-05-03T12:15:58Z","closed_at":"2020-05-03T12:15:57Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/32","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/32","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/32.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/32.patch","merged_at":"2020-05-03T12:15:57Z"},"body":"Previously, caching results with `.map()` didn't work in notebooks.\r\nTo reuse a result, `.map()` serializes the functions with `dill.dumps` and then it hashes it.\r\n\r\nThe problem is that when using `dill.dumps` to serialize a function, it also saves its origin (filename + line no.) and the origin of all the `globals` this function needs. However for notebooks and shells, the filename looks like \\ and the line no. changes often.\r\n\r\nTo fix the problem, I added a new dispatch function for code objects that ignore the origin of the code if it comes from a notebook or a python shell.\r\n\r\nI tested these cases in a notebook:\r\n- lambda functions\r\n- named functions\r\n- methods\r\n- classmethods\r\n- staticmethods\r\n- classes that implement `__call__`\r\n\r\nThe caching now works as expected for all of them :)\r\nI also tested the caching in the demo notebook and it works fine !","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/32\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/32\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/31","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/31\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/31\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/31\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/31","id":610677641,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEyMDczNDE4","number":31,"title":"[Circle ci] Install a virtual env before running tests","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-01T10:11:17Z","updated_at":"2020-05-01T22:06:16Z","closed_at":"2020-05-01T22:06:15Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/31","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/31","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/31.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/31.patch","merged_at":"2020-05-01T22:06:15Z"},"body":"Install a virtual env before running tests to not running into sudo issues when dynamically downloading files. \r\n\r\nSame number of tests now pass \/ fail as on my local computer: \r\n![Screenshot from 2020-05-01 12-14-44](https:\/\/user-images.githubusercontent.com\/23423619\/80798814-8a0a0a80-8ba5-11ea-8db8-599d33bbfccd.png)\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/31\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/31\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/30","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/30\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/30\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/30\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/30","id":610549072,"node_id":"MDExOlB1bGxSZXF1ZXN0NDExOTY4Mzk3","number":30,"title":"add metrics which require download files from github","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-05-01T04:13:22Z","updated_at":"2020-05-11T08:19:54Z","closed_at":"2020-05-11T08:19:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/30","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/30","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/30.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/30.patch","merged_at":null},"body":"To download files from github, I copied the `load_dataset_module` and its dependencies (without the builder) in `load.py` to `metrics\/metric_utils.py`. I made the following changes:\r\n\r\n- copy the needed files in a folder`metric_name` \r\n- delete all other files that are not needed\r\n\r\nFor metrics that require an external import, I first create a `_imports.py` file which contains all external urls. Then I create a `.py` in which I will load the external files using `_imports.py` ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/30\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/30\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/29","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/29\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/29\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/29\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/29","id":610243997,"node_id":"MDExOlB1bGxSZXF1ZXN0NDExNzIwODMx","number":29,"title":"Hf_api small changes","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-04-30T17:06:43Z","updated_at":"2020-04-30T19:51:45Z","closed_at":"2020-04-30T19:51:44Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/29","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/29","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/29.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/29.patch","merged_at":"2020-04-30T19:51:44Z"},"body":"From Patrick: \r\n```python \r\nfrom nlp import hf_api\r\napi = hf_api.HfApi()\r\napi.dataset_list()\r\n```\r\n\r\nworks :-) ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/29\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/29\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/28","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/28\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/28\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/28\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/28","id":610241907,"node_id":"MDExOlB1bGxSZXF1ZXN0NDExNzE5MTQy","number":28,"title":"[Circle ci] Adds circle ci config","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-30T17:03:35Z","updated_at":"2020-04-30T19:51:09Z","closed_at":"2020-04-30T19:51:08Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/28","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/28","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/28.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/28.patch","merged_at":"2020-04-30T19:51:08Z"},"body":"@thomwolf can you take a look and set up circle ci on: \r\nhttps:\/\/app.circleci.com\/projects\/project-dashboard\/github\/huggingface\r\n\r\nI think for `nlp` only admins can set it up, which I guess is you :-) ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/28\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/28\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/27","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/27\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/27\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/27\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/27","id":610230476,"node_id":"MDExOlB1bGxSZXF1ZXN0NDExNzA5OTc0","number":27,"title":"[Cleanup] Removes all files in testing except test_dataset_common","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-30T16:45:21Z","updated_at":"2020-04-30T17:39:25Z","closed_at":"2020-04-30T17:39:23Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/27","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/27","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/27.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/27.patch","merged_at":"2020-04-30T17:39:23Z"},"body":"As far as I know, all files in `tests` were old `tfds test files` so I removed them. We can still look them up on the other library. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/27\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/27\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/26","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/26\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/26\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/26\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/26","id":610226047,"node_id":"MDExOlB1bGxSZXF1ZXN0NDExNzA2NjA2","number":26,"title":"[Tests] Clean tests","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-30T16:38:29Z","updated_at":"2020-04-30T20:12:04Z","closed_at":"2020-04-30T20:12:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/26","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/26","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/26.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/26.patch","merged_at":"2020-04-30T20:12:03Z"},"body":"the abseil testing library (https:\/\/abseil.io\/docs\/python\/quickstart.html) is better than the one I had before, so I decided to switch to that and changed the `setup.py` config file. \r\nAbseil has more support and a cleaner API for parametrized testing I think. \r\n\r\nI added a list of all dataset scripts that are currently on AWS, but will replace that once the \r\nAPI is integrated into this lib. \r\n\r\nOne can now easily test for just a single function for a single dataset with:\r\n`tests\/test_dataset_common.py::DatasetTest::test_load_dataset_wikipedia` \r\n\r\nNOTE: This PR is rebased on PR #29 so should be merged after.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/26\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/26\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/25","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/25\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/25\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/25\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/25","id":609708863,"node_id":"MDExOlB1bGxSZXF1ZXN0NDExMjQ4Nzg2","number":25,"title":"Add script csv datasets","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-04-30T08:28:08Z","updated_at":"2020-05-08T17:31:51Z","closed_at":"2020-05-07T21:14:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/25","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/25","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/25.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/25.patch","merged_at":"2020-05-07T21:14:49Z"},"body":"This is a PR allowing to create datasets from local CSV files. A usage might be:\r\n\r\n```python\r\nimport nlp\r\nds = nlp.load(\r\n    path=\"csv\",\r\n    name=\"bbc\",\r\n    dataset_files={\r\n        nlp.Split.TRAIN: [\"datasets\/dummy_data\/csv\/train.csv\"],\r\n        nlp.Split.TEST: [\"\"datasets\/dummy_data\/csv\/test.csv\"\"]\r\n    },\r\n    csv_kwargs={\r\n        \"skip_rows\": 0,\r\n        \"delimiter\": \",\",\r\n        \"quote_char\": \"\\\"\",\r\n        \"header_as_column_names\": True\r\n    }\r\n)\r\n```\r\n\r\n```\r\nDownloading and preparing dataset bbc\/1.0.0 (download: Unknown size, generated: Unknown size, total: Unknown size) to \/home\/jplu\/.cache\/huggingface\/datasets\/bbc\/1.0.0...\r\nDataset bbc downloaded and prepared to \/home\/jplu\/.cache\/huggingface\/datasets\/bbc\/1.0.0. 
Subsequent calls will reuse this data.\r\n{'test': Dataset(schema: {'category': 'string', 'text': 'string'}, num_rows: 49), 'train': Dataset(schema: {'category': 'string', 'text': 'string'}, num_rows: 99), 'validation': Dataset(schema: {'category': 'string', 'text': 'string'}, num_rows: 0)}\r\n```\r\n\r\nHow it is read:\r\n\r\n- `path`: the `csv` word means \"I want to create a CSV dataset\"\r\n- `name`: the name of this dataset is `bbc`\r\n- `dataset_files`: this is a dictionary where each key is a split and its value is the list of files for that split.\r\n- `csv_kwargs`: these are the keyword arguments to \"explain\" how to read the CSV files\r\n  * `skip_rows`: number of rows to skip, starting from the beginning of the file\r\n  * `delimiter`: which delimiter is used to separate the columns\r\n  * `quote_char`: which quote character is used to wrap a column whose content contains the delimiter\r\n  * `header_as_column_names`: will use the first row (header) of the file as names for the features. Otherwise the names will be automatically generated as `f1`, `f2`, etc. This is applied after the `skip_rows` parameter.\r\n\r\n**TODO**: for now the `csv.py` script is copied as `ds_name.py` each time we create a new dataset; this behavior will be modified so that `csv.py` is copied only once and not for every CSV dataset.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/25\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/25\/timeline","performed_via_github_app":null}
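To make the `csv_kwargs` semantics concrete, here is a toy reader built on Python's standard `csv` module that mirrors the options described in PR #25 above; it is only an illustration, not the CSV builder added by the PR:

```python
import csv
from typing import Dict, List

def read_csv_split(
    path: str,
    skip_rows: int = 0,
    delimiter: str = ",",
    quote_char: str = '"',
    header_as_column_names: bool = True,
) -> Dict[str, List[str]]:
    with open(path, newline="", encoding="utf-8") as f:
        rows = list(csv.reader(f, delimiter=delimiter, quotechar=quote_char))[skip_rows:]
    if header_as_column_names:
        header, rows = rows[0], rows[1:]
    else:
        # Auto-generated column names f1, f2, ... as described above.
        header = [f"f{i + 1}" for i in range(len(rows[0]))]
    return {name: [row[i] for row in rows] for i, name in enumerate(header)}
```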
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/24","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/24\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/24\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/24\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/24","id":609064987,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEwNzE5MTU0","number":24,"title":"Add checksums","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-04-29T13:37:29Z","updated_at":"2020-04-30T19:52:50Z","closed_at":"2020-04-30T19:52:49Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/24","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/24","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/24.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/24.patch","merged_at":"2020-04-30T19:52:49Z"},"body":"### Checksums files\r\n\r\nThey are stored next to the dataset script in urls_checksums\/checksums.txt.\r\nThey are used to check the integrity of the datasets downloaded files.\r\nI kept the same format as tensorflow-datasets.\r\nThere is one checksums file for all configs.\r\n\r\n### Load a dataset\r\n\r\nWhen you do `load(\"squad\")`, it will also download the checksums file and put it next to the script in nlp\/datasets\/hash\/urls_checksums\/checksums.txt.\r\nIt also verifies that the downloaded files checksums match the expected ones.\r\n\r\nYou can ignore checksum tests with `load(\"squad\", ignore_checksums=True)` (under the hood it just adds `ignore_checksums=True` in the `DownloadConfig`)\r\n\r\n### Test a dataset\r\n\r\nThere is a new command `nlp-cli test squad` that runs `download_and_prepare` to see if it runs ok, and that verifies that all the checksums match. Allowed arguments are `--name`, `--all_configs`, `--ignore_checksums` and `--register_checksums`.\r\n\r\n### Register checksums\r\n\r\n1. 
If the dataset has external dataset files\r\n\r\nThe command `nlp-cli test squad --register_checksums --all_configs` runs `download_and_prepare` on all configs to see if it runs ok, and it creates the checksums file.\r\nYou can also register one config at a time using `--name` instead ; the checksums file will be completed and not overwritten.\r\n\r\nIf the script is a local script, the checksum file is moved to urls_checksums\/checksums.txt next to the local script, to enable the user to upload both the script and the checksums file afterwards with `nlp-cli upload squad`.\r\n\r\n2. If the dataset files are all inside the directory of the dataset script\r\n\r\nThe user can directly do `nlp-cli upload squad --register_checksums`, as there is no need to download anything.\r\nIn this case however, all the dataset must be uploaded at once.\r\n\r\n--\r\n\r\nPS : it doesn't allow to register checksums for canonical datasets, the file has to be added manually on S3 for now (I guess ?)\r\n\r\nAlso I feel like we must be sure that this processes would not constrain too much any user from uploading its dataset.\r\nLet me know what you think :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/24\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/24\/timeline","performed_via_github_app":null}
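The integrity check described in PR #24 above amounts to recomputing each downloaded file's sha256 and comparing it with the recorded value. Below is a sketch under the assumption that the checksums file has already been parsed into a `{url: hex_digest}` mapping; the on-disk format follows tensorflow-datasets, as stated above, and is not reproduced here:

```python
from hashlib import sha256
from typing import Dict

def verify_checksums(expected: Dict[str, str], downloaded: Dict[str, str]) -> None:
    """expected: url -> recorded sha256 hex digest; downloaded: url -> local file path."""
    for url, path in downloaded.items():
        m = sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                m.update(chunk)
        if url in expected and m.hexdigest() != expected[url]:
            raise ValueError(f"Checksum verification failed for {url} (file: {path})")
```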
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/23","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/23\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/23\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/23\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/23","id":608508706,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEwMjczOTU2","number":23,"title":"Add metrics","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-28T18:02:05Z","updated_at":"2020-05-11T08:19:38Z","closed_at":"2020-05-11T08:19:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/23","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/23","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/23.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/23.patch","merged_at":null},"body":"This PR is a draft for adding metrics (sacrebleu and seqeval are added)\r\n\r\nuse case examples:\r\n`import nlp`\r\n**sacrebleu:**\r\n```\r\nrefs = [['The dog bit the man.', 'It was not unexpected.', 'The man bit him first.'],\r\n        ['The dog had bit the man.', 'No one was surprised.', 'The man had bitten the dog.']]\r\nsys = ['The dog bit the man.', \"It wasn't surprising.\", 'The man had just bitten him.']\r\nsacrebleu = nlp.load_metrics('sacrebleu')\r\nprint(sacrebleu.score)\r\n```\r\n\r\n**seqeval:**\r\n```\r\ny_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]\r\ny_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]\r\nseqeval = nlp.load_metrics('seqeval')\r\nprint(seqeval.accuracy_score(y_true, y_pred)\r\nprint(seqeval.f1_score(y_true, y_pred)\r\n```\r\n_examples are taken from the corresponding web page_\r\n\r\nyour comments and suggestions are more than welcomed\r\n\r\n\r\n\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/23\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/23\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/22","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/22\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/22\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/22\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/22","id":608298586,"node_id":"MDExOlB1bGxSZXF1ZXN0NDEwMTAyMjU3","number":22,"title":"adding bleu score code","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-28T13:00:50Z","updated_at":"2020-04-28T17:48:20Z","closed_at":"2020-04-28T17:48:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/22","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/22","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/22.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/22.patch","merged_at":null},"body":"this PR add the BLEU score metric to the lib. It can be tested by running the following code.\r\n\r\n` from nlp.metrics import bleu\r\n\r\nhyp1 = \"It is a guide to action which ensures that the military always obeys the commands of the party\"\r\nref1a = \"It is a guide to action that ensures that the military  forces always being under the commands of the party \"\r\n ref1b = \"It is the guiding principle which guarantees the military force always being under the command of the Party\"\r\nref1c = \"It is the practical guide for the army always to heed the directions of the party\"\r\n    \r\nlist_of_references = [[ref1a, ref1b, ref1c]]\r\nhypotheses = [hyp1]\r\nbleu = bleu.bleu_score(list_of_references, hypotheses,4, smooth=True)\r\nprint(bleu) `","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/22\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/22\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/21","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/21\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/21\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/21\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/21","id":607914185,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA5Nzk2MTM4","number":21,"title":"Cleanup Features - Updating convert command - Fix Download manager","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2020-04-27T23:16:55Z","updated_at":"2020-05-01T09:29:47Z","closed_at":"2020-05-01T09:29:46Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/21","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/21","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/21.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/21.patch","merged_at":"2020-05-01T09:29:46Z"},"body":"This PR makes a number of changes:\r\n\r\n# Updating `Features`\r\n\r\nFeatures are a complex mechanism provided in `tfds` to be able to modify a dataset on-the-fly when serializing to disk and when loading from disk.\r\n\r\nWe don't really need this because (1) it hides too much from the user and (2) our datatype can be directly mapped to Arrow tables on drive so we usually don't need to change the format before\/after serialization.\r\n\r\nThis PR extracts and refactors these features in a single `features.py` files. It still keep a number of features classes for easy compatibility with tfds, namely the `Sequence`, `Tensor`, `ClassLabel` and `Translation` features.\r\n\r\nSome more complex features involving a pre-processing on-the-fly during serialization are kept:\r\n- `ClassLabel` which are able to convert from label strings to integers,\r\n- `Translation`which does some check on the languages.\r\n\r\n# Updating the `convert` command\r\n\r\nWe do a few updates here\r\n- following the simplification of the `features` (cf above), conversion are updated\r\n- we also makes it simpler to convert a single file\r\n- some code need to be fixed manually after conversion (e.g. to remove some encoding processing in former tfds `Text` features. 
We highlight this code with a \"git merge conflict\" style syntax for easy manual fixing.\r\n\r\n# Fix download manager iterator\r\n\r\nYou kept me up quite late on Tuesday night with this `os.scandir` change @lhoestq ;-)\r\n","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/21\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/21\/timeline","performed_via_github_app":null}
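The `ClassLabel` feature mentioned in the PR body above is the one that converts label strings to integers at serialization time. As a rough, self-contained illustration of that idea only (this is not the library's actual `ClassLabel` implementation; the class below is a made-up stand-in), the string/integer mapping boils down to:

```python
# Minimal sketch of a ClassLabel-style feature: it maps label strings to
# integer ids and back. Illustrative only, not the actual `nlp` code.
class SimpleClassLabel:
    def __init__(self, names):
        self.names = list(names)
        self._str2int = {name: idx for idx, name in enumerate(self.names)}

    def str2int(self, name):
        # Convert a label string to its integer id (KeyError if unknown).
        return self._str2int[name]

    def int2str(self, idx):
        # Convert an integer id back to its label string.
        return self.names[idx]


labels = SimpleClassLabel(names=["negative", "positive"])
print(labels.str2int("positive"))  # 1
print(labels.int2str(0))           # "negative"
```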
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/20","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/20\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/20\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/20\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/20","id":607313557,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA5MzEyMDI1","number":20,"title":"remove boto3 and promise dependencies","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-27T07:39:45Z","updated_at":"2020-04-27T16:04:17Z","closed_at":"2020-04-27T14:15:45Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/20","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/20","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/20.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/20.patch","merged_at":"2020-04-27T14:15:45Z"},"body":"With the new download manager, we don't need `promise` anymore.\r\nI also removed `boto3` as in [this pr](https:\/\/github.com\/huggingface\/transformers\/pull\/3968)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/20\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/20\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/19","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/19\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/19\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/19\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/19","id":606400645,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA4NjIwMjUw","number":19,"title":"Replace tf.constant for TF","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-04-24T15:32:06Z","updated_at":"2020-04-29T09:27:08Z","closed_at":"2020-04-25T21:18:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/19","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/19","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/19.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/19.patch","merged_at":"2020-04-25T21:18:45Z"},"body":"Replace simple tf.constant type of Tensor to tf.ragged.constant which allows to have examples of different size in a tf.data.Dataset.\r\n\r\nNow the training works with TF. 
Here the same example than for the PT in collab:\r\n\r\n```python\r\nimport tensorflow as tf\r\nimport nlp\r\nfrom transformers import BertTokenizerFast, TFBertForQuestionAnswering\r\n\r\n# Load our training dataset and tokenizer\r\ntrain_dataset = nlp.load('squad', split=\"train[:1%]\")\r\ntokenizer = BertTokenizerFast.from_pretrained('bert-base-cased')\r\n\r\ndef get_correct_alignement(context, answer):\r\n    start_idx = answer['answer_start'][0]\r\n    text = answer['text'][0]\r\n    end_idx = start_idx + len(text)\r\n    if context[start_idx:end_idx] == text:\r\n        return start_idx, end_idx       # When the gold label position is good\r\n    elif context[start_idx-1:end_idx-1] == text:\r\n        return start_idx-1, end_idx-1   # When the gold label is off by one character\r\n    elif context[start_idx-2:end_idx-2] == text:\r\n        return start_idx-2, end_idx-2   # When the gold label is off by two character\r\n    else:\r\n        raise ValueError()\r\n\r\n# Tokenize our training dataset\r\ndef convert_to_features(example_batch):\r\n    # Tokenize contexts and questions (as pairs of inputs)\r\n    input_pairs = list(zip(example_batch['context'], example_batch['question']))\r\n    encodings = tokenizer.batch_encode_plus(input_pairs, pad_to_max_length=True)\r\n\r\n    # Compute start and end tokens for labels using Transformers's fast tokenizers alignement methods.\r\n    start_positions, end_positions = [], []\r\n    for i, (context, answer) in enumerate(zip(example_batch['context'], example_batch['answers'])):\r\n        start_idx, end_idx = get_correct_alignement(context, answer)\r\n        start_positions.append([encodings.char_to_token(i, start_idx)])\r\n        end_positions.append([encodings.char_to_token(i, end_idx-1)])\r\n    \r\n    if start_positions and end_positions:\r\n      encodings.update({'start_positions': start_positions,\r\n                        'end_positions': end_positions})\r\n    return encodings\r\n\r\ntrain_dataset = train_dataset.map(convert_to_features, batched=True)\r\n\r\ncolumns = ['input_ids', 'token_type_ids', 'attention_mask', 'start_positions', 'end_positions']\r\ntrain_dataset.set_format(type='tensorflow', columns=columns)\r\nfeatures = {x: train_dataset[x] for x in columns[:3]} \r\nlabels = {\"output_1\": train_dataset[\"start_positions\"]}\r\nlabels[\"output_2\"] = train_dataset[\"end_positions\"]\r\ntfdataset = tf.data.Dataset.from_tensor_slices((features, labels)).batch(8)\r\nmodel = TFBertForQuestionAnswering.from_pretrained(\"bert-base-cased\")\r\nloss_fn = tf.keras.losses.SparseCategoricalCrossentropy(reduction=tf.keras.losses.Reduction.NONE, from_logits=True)\r\nopt = tf.keras.optimizers.Adam(learning_rate=3e-5)\r\nmodel.compile(optimizer=opt,\r\n              loss={'output_1': loss_fn, 'output_2': loss_fn},\r\n              loss_weights={'output_1': 1., 'output_2': 1.},\r\n              metrics=['accuracy'])\r\nmodel.fit(tfdataset, epochs=1, steps_per_epoch=3)\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/19\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/19\/timeline","performed_via_github_app":null}
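The switch to `tf.ragged.constant` described in the PR above is what allows rows of different lengths to coexist in a single `tf.data.Dataset`. A minimal, standalone illustration of the mechanism (independent of the SQuAD example above; the token id sequences are made up):

```python
import tensorflow as tf

# Token id sequences of different lengths: a plain tf.constant would raise an
# error here because the rows do not form a rectangular tensor.
sequences = [
    [101, 2023, 2003, 102],
    [101, 2460, 102],
    [101, 2172, 3086, 2003, 102],
]

ragged = tf.ragged.constant(sequences)  # RaggedTensor of shape (3, None)
dataset = tf.data.Dataset.from_tensor_slices(ragged)

# Each element comes back as a variable-length dense tensor.
for example in dataset:
    print(example.numpy())
```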
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/18","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/18\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/18\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/18\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/18","id":606109196,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA4Mzg0MTc3","number":18,"title":"Updating caching mechanism - Allow dependency in dataset processing scripts - Fix style and quality in the repo","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-04-24T07:39:48Z","updated_at":"2020-04-29T15:27:28Z","closed_at":"2020-04-28T16:06:28Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/18","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/18","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/18.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/18.patch","merged_at":"2020-04-28T16:06:28Z"},"body":"This PR has a lot of content (might be hard to review, sorry, in particular because I fixed the style in the repo at the same time).\r\n\r\n# Style & quality:\r\nYou can now install the style and quality tools with `pip install -e .[quality]`. 
This will install black, the compatible version of isort and flake8.\r\nYou can then clean the style and check the quality before merging your PR with:\r\n```bash\r\nmake style\r\nmake quality\r\n```\r\n\r\n# Allow dependencies in dataset processing scripts\r\nWe can now allow (some level of) imports in dataset processing scripts (in addition to PyPi imports).\r\nNamely, you can do the following two things:\r\n\r\nImport from a relative path to a file in the same folder as the dataset processing script:\r\n```python\r\nimport .c4_utils\r\n```\r\n\r\nOr import from a relative path to a file in a folder\/archive\/github repo to which you provide a URL after the import statement with `# From: [URL]`:\r\n```python\r\nimport .clicr.dataset_code.build_json_dataset  # From: https:\/\/github.com\/clips\/clicr\r\n```\r\n\r\nIn both these cases, after downloading the main dataset processing script, we will identify the location of these dependencies, download them and copy them into the dataset processing script folder.\r\n\r\nNote that only direct imports in the dataset processing script will be handled.\r\nWe don't recursively explore the additional imports to download further files.\r\nAlso, when we download from an additional directory (in the second case above), we recursively add `__init__.py` to all the sub-folders so you can import from them.\r\n\r\nThis part is still untested for now. If you've seen datasets which required external utilities, tell me and I can test it.\r\n\r\n# Update the cache to have a better local structure\r\n\r\nThe local structure in the `src\/datasets` folder is now: `src\/datasets\/DATASET_NAME\/DATASET_HASH\/*`\r\n\r\nThe hash is computed from the full code of the dataset processing script as well as all the local and downloaded dependencies mentioned above. This way, if you change some code in a utility related to your dataset, a new hash should be computed.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/18\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/18\/timeline","performed_via_github_app":null}
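The cache layout described at the end of the PR above (`src/datasets/DATASET_NAME/DATASET_HASH/*`) keys the cache on a hash of the processing script plus its dependencies, so editing any of those files yields a new directory. A minimal sketch of that idea, with a made-up helper name and example paths (this is not the repository's actual hashing code):

```python
import hashlib
import os


def dataset_cache_dir(cache_root, dataset_name, script_path, dependency_paths=()):
    """Illustrative sketch: derive a cache directory from the full code of the
    dataset processing script plus its local/downloaded dependencies, so that
    editing any of these files produces a new hash and thus a fresh cache dir."""
    hasher = hashlib.sha256()
    for path in [script_path, *dependency_paths]:
        with open(path, "rb") as f:
            hasher.update(f.read())
    dataset_hash = hasher.hexdigest()[:16]
    return os.path.join(cache_root, dataset_name, dataset_hash)


# Hypothetical usage:
# dataset_cache_dir("src/datasets", "squad", "squad.py", ["squad_utils.py"])
```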
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/17","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/17\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/17\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/17\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/17","id":605753027,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA4MDk3NjM0","number":17,"title":"Add Pandas as format type","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-23T18:20:14Z","updated_at":"2020-04-27T18:07:50Z","closed_at":"2020-04-27T18:07:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/17","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/17","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/17.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/17.patch","merged_at":"2020-04-27T18:07:48Z"},"body":"As detailed in the title ^^","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/17\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/17\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/16","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/16\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/16\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/16\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/16","id":605661462,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA4MDIyMTUz","number":16,"title":"create our own DownloadManager","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2020-04-23T16:08:07Z","updated_at":"2021-05-05T18:25:24Z","closed_at":"2020-04-25T21:25:10Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/16","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/16","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/16.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/16.patch","merged_at":"2020-04-25T21:25:10Z"},"body":"I tried to create our own - and way simpler - download manager, by replacing all the complicated stuff with our own `cached_path` solution.\r\nWith this implementation, I tried `dataset = nlp.load('squad')` and it seems to work fine.\r\n\r\nFor the implementation, what I did exactly:\r\n- I copied the old download manager\r\n- I removed all the dependences to the old `download` files\r\n- I replaced all the download + extract calls by calls to `cached_path`\r\n- I removed unused parameters (extract_dir, compute_stats) (maybe compute_stats could be re-added later if we want to compute stats...)\r\n- I left some functions unimplemented for now. 
We will probably have to implement them because they are used by some datasets scripts (download_kaggle_data, iter_archive) or because we may need them at some point (download_checksums, _record_sizes_checksums)\r\n\r\nLet me know if you think that this is going the right direction or if you have remarks.\r\nNote: I didn't write any test yet as I wanted to read your remarks first","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/16\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/16\/timeline","performed_via_github_app":null}
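The core simplification in the PR above is that downloading boils down to mapping every URL through a `cached_path`-style helper. A rough, hypothetical sketch of that shape (not the actual `nlp` DownloadManager; the helper and class names are invented, and archive extraction is left out):

```python
import os
import urllib.request


def fetch_and_cache(url, cache_dir):
    """Hypothetical stand-in for a `cached_path`-style helper: download `url`
    into `cache_dir` once and reuse the local copy on later calls."""
    os.makedirs(cache_dir, exist_ok=True)
    local_path = os.path.join(cache_dir, os.path.basename(url) or "downloaded_file")
    if not os.path.exists(local_path):
        urllib.request.urlretrieve(url, local_path)
    return local_path


class MiniDownloadManager:
    """Sketch only: map single URLs, lists or dicts of URLs through the cache
    helper, returning local paths with the same nesting as the input."""

    def __init__(self, cache_dir):
        self.cache_dir = cache_dir

    def download(self, url_or_urls):
        if isinstance(url_or_urls, dict):
            return {name: fetch_and_cache(url, self.cache_dir) for name, url in url_or_urls.items()}
        if isinstance(url_or_urls, (list, tuple)):
            return [fetch_and_cache(url, self.cache_dir) for url in url_or_urls]
        return fetch_and_cache(url_or_urls, self.cache_dir)
```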
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/15","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/15\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/15\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/15\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/15","id":604906708,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA3NDEwOTk3","number":15,"title":"[Tests] General Test Design for all dataset scripts","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2020-04-22T16:46:01Z","updated_at":"2020-04-27T15:01:08Z","closed_at":"2020-04-27T14:48:02Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/15","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/15","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/15.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/15.patch","merged_at":"2020-04-27T14:48:02Z"},"body":"The general idea is similar to how testing is done in `transformers`. There is one general `test_dataset_common.py` file which has a `DatasetTesterMixin` class. This class implements all of the logic that can be used in a generic way for all dataset classes. The idea is to keep each individual dataset test file as minimal as possible. \r\n\r\nIn order to test whether the specific data set class can download the data and generate the examples **without** downloading the actual data all the time, a MockDataLoaderManager class is used which receives a `mock_folder_structure_fn` function from each individual dataset test file that create \"fake\" data and which returns the same folder structure that would have been created when using the real data downloader. ","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/15\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/15\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/14","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/14\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/14\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/14\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/14","id":604761315,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA3MjkzNjU5","number":14,"title":"[Download] Only create dir if not already exist","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-22T13:32:51Z","updated_at":"2020-04-23T08:27:33Z","closed_at":"2020-04-23T08:27:33Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/14","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/14","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/14.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/14.patch","merged_at":"2020-04-23T08:27:33Z"},"body":"This was quite annoying to find out :D. \r\nSome datasets have save in the same directory. So we should only create a new directory if it doesn't already exist.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/14\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/14\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/13","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/13\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/13\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/13\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/13","id":604547951,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA3MTIxMjkw","number":13,"title":"[Make style]","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-04-22T08:10:06Z","updated_at":"2020-04-23T13:02:22Z","closed_at":"2020-04-23T13:02:22Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/13","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/13","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/13.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/13.patch","merged_at":"2020-04-23T13:02:22Z"},"body":"Added Makefile and applied make style to all. \r\nmake style runs the following code:\r\n\r\n```\r\nstyle:\r\n          black --line-length 119 --target-version py35 src\r\n          isort --recursive src\r\n```\r\n\r\nIt's the same code that is run in `transformers`.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/13\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/13\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/12","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/12\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/12\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/12\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/12","id":604518583,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA3MDk3MzA4","number":12,"title":"[Map Function] add assert statement if map function does not return dict or None","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-04-22T07:21:24Z","updated_at":"2020-10-08T06:31:41Z","closed_at":"2020-04-24T06:29:03Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/12","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/12","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/12.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/12.patch","merged_at":"2020-04-24T06:29:03Z"},"body":"IMO, if a function is provided that is not a print statement (-> returns variable of type `None`) or a function that updates the datasets (-> returns variable of type `dict`), then a `TypeError` should be raised. \r\n\r\nNot sure whether you had cases in mind where the user should do something else @thomwolf , but I think a lot of silent errors can be avoided with this assert statement.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/12\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/12\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/11","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/11\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/11\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/11\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/11","id":603921624,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA2NjExODk2","number":11,"title":"[Convert TFDS to HFDS] Extend script to also allow just converting a single file","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-21T11:25:33Z","updated_at":"2020-04-21T20:47:01Z","closed_at":"2020-04-21T20:47:00Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/11","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/11","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/11.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/11.patch","merged_at":"2020-04-21T20:47:00Z"},"body":"Adds another argument to be able to convert only a single file","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/11\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/11\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/10","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/10\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/10\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/10\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/10","id":603909327,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA2NjAxNzQ2","number":10,"title":"Name json file \"squad.json\" instead of \"squad.py.json\"","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-21T11:04:28Z","updated_at":"2020-04-21T20:48:06Z","closed_at":"2020-04-21T20:48:06Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/10","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/10","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/10.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/10.patch","merged_at":"2020-04-21T20:48:06Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/10\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/10\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/9","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/9\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/9\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/9\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/9","id":603894874,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA2NTkwMDQw","number":9,"title":"[Clean up] Datasets","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2020-04-21T10:39:56Z","updated_at":"2020-04-21T20:49:58Z","closed_at":"2020-04-21T20:49:58Z","author_association":"MEMBER","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/9","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/9","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/9.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/9.patch","merged_at":"2020-04-21T20:49:58Z"},"body":"Clean up `nlp\/datasets` folder. \r\n\r\nAs I understood, eventually the `nlp\/datasets` shall not exist anymore at all. \r\n\r\nThe folder `nlp\/datasets\/nlp` is kept for the moment, but won't be needed in the future, since it will live on S3 (actually it already does) at: `https:\/\/s3.console.aws.amazon.com\/s3\/buckets\/datasets.huggingface.co\/nlp\/?region=us-east-1` and the different `dataset downloader scripts will be added to `nlp\/src\/nlp` when downloaded by the user. \r\n\r\nThe folder `nlp\/datasets\/checksums` is kept for now, but won't be needed anymore in the future. \r\n\r\nThe remaining folders\/ files are leftovers from tensorflow-datasets and are not needed. The can be looked up in the private tensorflow-dataset repo.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/9\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/9\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/8","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/8\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/8\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/8\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/8","id":601783243,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA0OTg0NDUz","number":8,"title":"Fix issue 6: error when the citation is missing in the DatasetInfo","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-17T08:04:26Z","updated_at":"2020-04-29T09:27:11Z","closed_at":"2020-04-20T13:24:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/8","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/8","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/8.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/8.patch","merged_at":"2020-04-20T13:24:12Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/8\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/8\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/7","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/7\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/7\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/7\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/7","id":601780534,"node_id":"MDExOlB1bGxSZXF1ZXN0NDA0OTgyMzA2","number":7,"title":"Fix issue 5: allow empty datasets","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-17T07:59:56Z","updated_at":"2020-04-29T09:27:13Z","closed_at":"2020-04-20T13:23:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/7","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/7","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/7.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/7.patch","merged_at":"2020-04-20T13:23:47Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/7\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/7\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/6","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/6\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/6\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/6\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/6","id":600330836,"node_id":"MDU6SXNzdWU2MDAzMzA4MzY=","number":6,"title":"Error when citation is not given in the DatasetInfo","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-04-15T14:14:54Z","updated_at":"2020-04-29T09:23:22Z","closed_at":"2020-04-29T09:23:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"The following error is raised when the `citation` parameter is missing when we instantiate a `DatasetInfo`:\r\n```\r\nTraceback (most recent call last):\r\n  File \"\", line 1, in \r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/info.py\", line 338, in __repr__\r\n    citation_pprint = _indent('\"\"\"{}\"\"\"'.format(self.citation.strip()))\r\nAttributeError: 'NoneType' object has no attribute 'strip'\r\n```\r\n\r\nI propose to do the following change in the `info.py` file. 
The method:\r\n```python\r\ndef __repr__(self):\r\n        splits_pprint = _indent(\"\\n\".join([\"{\"] + [\r\n                \"    '{}': {},\".format(k, split.num_examples)\r\n                for k, split in sorted(self.splits.items())\r\n        ] + [\"}\"]))\r\n        features_pprint = _indent(repr(self.features))\r\n        citation_pprint = _indent('\"\"\"{}\"\"\"'.format(self.citation.strip()))\r\n        return INFO_STR.format(\r\n                name=self.name,\r\n                version=self.version,\r\n                description=self.description,\r\n                total_num_examples=self.splits.total_num_examples,\r\n                features=features_pprint,\r\n                splits=splits_pprint,\r\n                citation=citation_pprint,\r\n                homepage=self.homepage,\r\n                supervised_keys=self.supervised_keys,\r\n                # Proto add a \\n that we strip.\r\n                license=str(self.license).strip())\r\n```\r\nBecomes:\r\n```python\r\ndef __repr__(self):\r\n        splits_pprint = _indent(\"\\n\".join([\"{\"] + [\r\n                \"    '{}': {},\".format(k, split.num_examples)\r\n                for k, split in sorted(self.splits.items())\r\n        ] + [\"}\"]))\r\n        features_pprint = _indent(repr(self.features))\r\n        ## the strip is done only is the citation is given\r\n        citation_pprint = self.citation\r\n\r\n        if self.citation:\r\n            citation_pprint = _indent('\"\"\"{}\"\"\"'.format(self.citation.strip()))\r\n        return INFO_STR.format(\r\n                name=self.name,\r\n                version=self.version,\r\n                description=self.description,\r\n                total_num_examples=self.splits.total_num_examples,\r\n                features=features_pprint,\r\n                splits=splits_pprint,\r\n                citation=citation_pprint,\r\n                homepage=self.homepage,\r\n                supervised_keys=self.supervised_keys,\r\n                # Proto add a \\n that we strip.\r\n                license=str(self.license).strip())\r\n```\r\nAnd now it is ok. @thomwolf are you ok with this fix?","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/6\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/6\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/5","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/5\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/5\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/5\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/5","id":600295889,"node_id":"MDU6SXNzdWU2MDAyOTU4ODk=","number":5,"title":"ValueError when a split is empty","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-04-15T13:25:13Z","updated_at":"2020-04-29T09:23:05Z","closed_at":"2020-04-29T09:23:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"When a split is empty either TEST, VALIDATION or TRAIN I get the following error:\r\n```\r\nTraceback (most recent call last):\r\n  File \"\", line 1, in \r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/load.py\", line 295, in load\r\n    ds = dbuilder.as_dataset(**as_dataset_kwargs)\r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/builder.py\", line 587, in as_dataset\r\n    datasets = utils.map_nested(build_single_dataset, split, map_tuple=True)\r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/utils\/py_utils.py\", line 158, in map_nested\r\n    for k, v in data_struct.items()\r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/utils\/py_utils.py\", line 158, in \r\n    for k, v in data_struct.items()\r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/utils\/py_utils.py\", line 172, in map_nested\r\n    return function(data_struct)\r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/builder.py\", line 601, in _build_single_dataset\r\n    split=split,\r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/builder.py\", line 625, in _as_dataset\r\n    split_infos=self.info.splits.values(),\r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/arrow_reader.py\", line 200, in read\r\n    return py_utils.map_nested(_read_instruction_to_ds, instructions)\r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/utils\/py_utils.py\", line 172, in map_nested\r\n    return function(data_struct)\r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/arrow_reader.py\", line 191, in _read_instruction_to_ds\r\n    file_instructions = make_file_instructions(name, split_infos, instruction)\r\n  File 
\"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/arrow_reader.py\", line 104, in make_file_instructions\r\n    absolute_instructions=absolute_instructions,\r\n  File \"\/home\/jplu\/dev\/jplu\/datasets\/src\/nlp\/arrow_reader.py\", line 122, in _make_file_instructions_from_absolutes\r\n    'Split empty. This might means that dataset hasn\\'t been generated '\r\nValueError: Split empty. This might means that dataset hasn't been generated yet and info not restored from GCS, or that legacy dataset is used.\r\n``` \r\n\r\nHow to reproduce:\r\n```python\r\nimport csv\r\n\r\nimport nlp\r\n\r\n\r\nclass Bbc(nlp.GeneratorBasedBuilder):\r\n    VERSION = nlp.Version(\"1.0.0\")\r\n\r\n    def __init__(self, **config):\r\n        self.train = config.pop(\"train\", None)\r\n        self.validation = config.pop(\"validation\", None)\r\n        super(Bbc, self).__init__(**config)\r\n\r\n    def _info(self):\r\n        return nlp.DatasetInfo(builder=self, description=\"bla\", features=nlp.features.FeaturesDict({\"id\": nlp.int32, \"text\": nlp.string, \"label\": nlp.string}))\r\n\r\n    def _split_generators(self, dl_manager):\r\n        return [nlp.SplitGenerator(name=nlp.Split.TRAIN, gen_kwargs={\"filepath\": self.train}),\r\n                nlp.SplitGenerator(name=nlp.Split.VALIDATION, gen_kwargs={\"filepath\": self.validation}),\r\n                nlp.SplitGenerator(name=nlp.Split.TEST, gen_kwargs={\"filepath\": None})]\r\n\r\n    def _generate_examples(self, filepath):\r\n        if not filepath:\r\n            return None, {}\r\n\r\n        with open(filepath) as f:\r\n            reader = csv.reader(f, delimiter=',', quotechar=\"\\\"\")\r\n            lines = list(reader)[1:]\r\n\r\n            for idx, line in enumerate(lines):\r\n                yield idx, {\"id\": idx, \"text\": line[1], \"label\": line[0]}\r\n```\r\n\r\n```python\r\nimport nlp\r\ndataset = nlp.load(\"bbc\", builder_kwargs={\"train\": \"bbc\/data\/train.csv\", \"validation\": \"bbc\/data\/test.csv\"})\r\n```","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/5\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/5\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/4","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/4\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/4\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/4\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/4","id":600185417,"node_id":"MDU6SXNzdWU2MDAxODU0MTc=","number":4,"title":"[Feature] Keep the list of labels of a dataset as metadata","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2020-04-15T10:17:10Z","updated_at":"2020-07-08T16:59:46Z","closed_at":"2020-05-04T06:11:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"It would be useful to keep the list of the labels of a dataset as metadata. Either directly in the `DatasetInfo` or in the Arrow metadata.","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/4\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/4\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/3","id":600180050,"node_id":"MDU6SXNzdWU2MDAxODAwNTA=","number":3,"title":"[Feature] More dataset outputs","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2020-04-15T10:08:14Z","updated_at":"2020-05-04T06:12:27Z","closed_at":"2020-05-04T06:12:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Add the following dataset outputs:\r\n\r\n- Spark\r\n- Pandas","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/3\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2","id":599767671,"node_id":"MDU6SXNzdWU1OTk3Njc2NzE=","number":2,"title":"Issue to read a local dataset","user":{"login":"jplu","id":959590,"node_id":"MDQ6VXNlcjk1OTU5MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/959590?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jplu","html_url":"https:\/\/github.com\/jplu","followers_url":"https:\/\/api.github.com\/users\/jplu\/followers","following_url":"https:\/\/api.github.com\/users\/jplu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jplu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jplu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jplu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jplu\/orgs","repos_url":"https:\/\/api.github.com\/users\/jplu\/repos","events_url":"https:\/\/api.github.com\/users\/jplu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jplu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2020-04-14T18:18:51Z","updated_at":"2020-05-11T18:55:23Z","closed_at":"2020-05-11T18:55:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":null,"pull_request":null,"body":"Hello,\r\n\r\nAs proposed by @thomwolf, I open an issue to explain what I'm trying to do without success. 
What I want to do is to create and load a local dataset; the script I have written is the following:\r\n```python\r\nimport os\r\nimport csv\r\n\r\nimport nlp\r\n\r\n\r\nclass BbcConfig(nlp.BuilderConfig):\r\n    def __init__(self, **kwargs):\r\n        super(BbcConfig, self).__init__(**kwargs)\r\n\r\n\r\nclass Bbc(nlp.GeneratorBasedBuilder):\r\n    _DIR = \".\/data\"\r\n    _DEV_FILE = \"test.csv\"\r\n    _TRAINING_FILE = \"train.csv\"\r\n\r\n    BUILDER_CONFIGS = [BbcConfig(name=\"bbc\", version=nlp.Version(\"1.0.0\"))]\r\n\r\n    def _info(self):\r\n        return nlp.DatasetInfo(builder=self, features=nlp.features.FeaturesDict({\"id\": nlp.string, \"text\": nlp.string, \"label\": nlp.string}))\r\n\r\n    def _split_generators(self, dl_manager):\r\n        files = {\"train\": os.path.join(self._DIR, self._TRAINING_FILE), \"dev\": os.path.join(self._DIR, self._DEV_FILE)}\r\n\r\n        return [nlp.SplitGenerator(name=nlp.Split.TRAIN, gen_kwargs={\"filepath\": files[\"train\"]}),\r\n                nlp.SplitGenerator(name=nlp.Split.VALIDATION, gen_kwargs={\"filepath\": files[\"dev\"]})]\r\n\r\n    def _generate_examples(self, filepath):\r\n        with open(filepath) as f:\r\n            reader = csv.reader(f, delimiter=',', quotechar=\"\\\"\")\r\n            lines = list(reader)[1:]\r\n\r\n            for idx, line in enumerate(lines):\r\n                yield idx, {\"idx\": idx, \"text\": line[1], \"label\": line[0]}\r\n\r\n```\r\n\r\nThe dataset is attached to this issue as well:\r\n[data.zip](https:\/\/github.com\/huggingface\/datasets\/files\/4476928\/data.zip)\r\n\r\nNow the steps to reproduce what I would like to do:\r\n1. unzip data locally (I know the nlp lib can detect and extract archives but I want to reduce and facilitate the reproduction as much as possible)\r\n2. 
create the `bbc.py` script as above at the same location as the unzipped `data` folder.\r\n\r\nNow I try to load the dataset in three different ways and none of them works; the first one uses the name of the dataset, like I would do with TFDS:\r\n```python\r\nimport nlp\r\nfrom bbc import Bbc\r\ndataset = nlp.load(\"bbc\")\r\n```\r\n\r\nI get:\r\n```\r\nTraceback (most recent call last):\r\n  File \"\", line 1, in \r\n  File \"\/opt\/anaconda3\/envs\/transformers\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 280, in load\r\n    dbuilder: DatasetBuilder = builder(path, name, data_dir=data_dir, **builder_kwargs)\r\n  File \"\/opt\/anaconda3\/envs\/transformers\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 166, in builder\r\n    builder_cls = load_dataset(path, name=name, **builder_kwargs)\r\n  File \"\/opt\/anaconda3\/envs\/transformers\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 88, in load_dataset\r\n    local_files_only=local_files_only,\r\n  File \"\/opt\/anaconda3\/envs\/transformers\/lib\/python3.7\/site-packages\/nlp\/utils\/file_utils.py\", line 214, in cached_path\r\n    if not is_zipfile(output_path) and not tarfile.is_tarfile(output_path):\r\n  File \"\/opt\/anaconda3\/envs\/transformers\/lib\/python3.7\/zipfile.py\", line 203, in is_zipfile\r\n    with open(filename, \"rb\") as fp:\r\nTypeError: expected str, bytes or os.PathLike object, not NoneType\r\n```\r\n\r\nBut @thomwolf told me that there is no need to import the script, just to put its path, so I tried three different ways:\r\n```python\r\nimport nlp\r\ndataset = nlp.load(\"bbc.py\")\r\n```\r\nAnd\r\n```python\r\nimport nlp\r\ndataset = nlp.load(\".\/bbc.py\")\r\n```\r\nAnd\r\n```python\r\nimport nlp\r\ndataset = nlp.load(\"\/absolute\/path\/to\/bbc.py\")\r\n```\r\n\r\nThese three ways give me:\r\n```\r\nTraceback (most recent call last):\r\n  File \"\", line 1, in \r\n  File \"\/opt\/anaconda3\/envs\/transformers\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 280, in load\r\n    dbuilder: DatasetBuilder = builder(path, name, data_dir=data_dir, **builder_kwargs)\r\n  File \"\/opt\/anaconda3\/envs\/transformers\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 166, in builder\r\n    builder_cls = load_dataset(path, name=name, **builder_kwargs)\r\n  File \"\/opt\/anaconda3\/envs\/transformers\/lib\/python3.7\/site-packages\/nlp\/load.py\", line 124, in load_dataset\r\n    dataset_module = importlib.import_module(module_path)\r\n  File \"\/opt\/anaconda3\/envs\/transformers\/lib\/python3.7\/importlib\/__init__.py\", line 127, in import_module\r\n    return _bootstrap._gcd_import(name[level:], package, level)\r\n  File \"\", line 1006, in _gcd_import\r\n  File \"\", line 983, in _find_and_load\r\n  File \"\", line 965, in _find_and_load_unlocked\r\nModuleNotFoundError: No module named 'nlp.datasets.2fd72627d92c328b3e9c4a3bf7ec932c48083caca09230cebe4c618da6e93688.bbc'\r\n```\r\nAny idea of what I'm missing? Or I might have spotted a bug :)","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2\/reactions","total_count":1,"+1":1,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2\/timeline","performed_via_github_app":null}
{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1","id":599457467,"node_id":"MDExOlB1bGxSZXF1ZXN0NDAzMDk1NDYw","number":1,"title":"changing nlp.bool to nlp.bool_","user":{"login":"mariamabarham","id":38249783,"node_id":"MDQ6VXNlcjM4MjQ5Nzgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38249783?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariamabarham","html_url":"https:\/\/github.com\/mariamabarham","followers_url":"https:\/\/api.github.com\/users\/mariamabarham\/followers","following_url":"https:\/\/api.github.com\/users\/mariamabarham\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariamabarham\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariamabarham\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariamabarham\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariamabarham\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariamabarham\/repos","events_url":"https:\/\/api.github.com\/users\/mariamabarham\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariamabarham\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2020-04-14T10:18:02Z","updated_at":"2020-04-14T12:01:40Z","closed_at":"2020-04-14T12:01:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"draft":false,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1.patch","merged_at":"2020-04-14T12:01:40Z"},"body":"","reactions":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1\/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"timeline_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1\/timeline","performed_via_github_app":null}